diff --git a/swh/web/assets/src/bundles/browse/origin-search.js b/swh/web/assets/src/bundles/browse/origin-search.js
index e4a0ba9c..f4ad8944 100644
--- a/swh/web/assets/src/bundles/browse/origin-search.js
+++ b/swh/web/assets/src/bundles/browse/origin-search.js
@@ -1,164 +1,164 @@
/**
* Copyright (C) 2018 The Software Heritage developers
* See the AUTHORS file at the top-level directory of this distribution
* License: GNU Affero General Public License version 3, or any later version
* See top-level LICENSE file for more information
*/
import {heapsPermute} from 'utils/heaps-permute';
import {handleFetchError} from 'utils/functions';
let originPatterns;
let perPage = 20;
let limit = perPage * 10;
let offset = 0;
let currentData = null;
let inSearch = false;
function fixTableRowsStyle() {
setTimeout(() => {
$('#origin-search-results tbody tr').removeAttr('style');
});
}
function clearOriginSearchResultsTable() {
$('#origin-search-results tbody tr').remove();
}
function populateOriginSearchResultsTable(data, offset) {
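// note: results are fetched from the backend 'limit' (perPage * 10) entries at a
// time while the table only shows 'perPage' rows; localOffset below locates the
// current page inside the batch kept in memory (e.g. offset 220 -> localOffset 20)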
let localOffset = offset % limit;
if (data.length > 0) {
$('#swh-origin-search-results').show();
$('#swh-no-origins-found').hide();
clearOriginSearchResultsTable();
let table = $('#origin-search-results tbody');
for (let i = localOffset; i < localOffset + perPage && i < data.length; ++i) {
let elem = data[i];
let tableRow = '<tr>';
tableRow += '<td>' + elem.type + '</td>';
- let browseUrl = Urls.browse_origin(elem.type, elem.url);
+ let browseUrl = Urls.browse_origin(elem.url);
tableRow += '<td><a href="' + browseUrl + '">' + browseUrl + '</a></td>';
tableRow += '<td id="visit-status-origin-' + elem.id + '"></td>';
tableRow += '</tr>';
table.append(tableRow);
// get async latest visit snapshot and update visit status icon
let latestSnapshotUrl = Urls.browse_origin_latest_snapshot(elem.id);
fetch(latestSnapshotUrl, {credentials: 'same-origin'})
.then(response => response.json())
.then(data => {
let originId = elem.id;
$('#visit-status-origin-' + originId).children().remove();
if (data) {
$('#visit-status-origin-' + originId).append('');
} else {
$('#visit-status-origin-' + originId).append('');
}
});
}
fixTableRowsStyle();
} else {
$('#swh-origin-search-results').hide();
$('#swh-no-origins-found').show();
}
if (data.length - localOffset < perPage ||
(data.length < limit && (localOffset + perPage) === data.length)) {
$('#origins-next-results-button').addClass('disabled');
} else {
$('#origins-next-results-button').removeClass('disabled');
}
if (offset > 0) {
$('#origins-prev-results-button').removeClass('disabled');
} else {
$('#origins-prev-results-button').addClass('disabled');
}
inSearch = false;
setTimeout(() => {
window.scrollTo(0, 0);
});
}
function searchOrigins(patterns, limit, searchOffset, offset) {
originPatterns = patterns;
let patternsArray = patterns.trim().replace(/\s+/g, ' ').split(' ');
let patternsPermut = [];
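// build a regexp matching the provided terms in any order, e.g. the patterns
// "foo bar" yield "foo.*bar|bar.*foo"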
heapsPermute(patternsArray, p => patternsPermut.push(p.join('.*')));
let regex = patternsPermut.join('|');
let searchUrl = Urls.browse_origin_search(regex) + `?limit=${limit}&offset=${searchOffset}&regexp=true`;
clearOriginSearchResultsTable();
$('.swh-loading').addClass('show');
fetch(searchUrl, {credentials: 'same-origin'})
.then(handleFetchError)
.then(response => response.json())
.then(data => {
currentData = data;
if (typeof Storage !== 'undefined') {
sessionStorage.setItem('last-swh-origin-url-patterns', patterns);
sessionStorage.setItem('last-swh-origin-search-results', JSON.stringify(data));
sessionStorage.setItem('last-swh-origin-search-offset', offset);
}
$('.swh-loading').removeClass('show');
populateOriginSearchResultsTable(data, offset);
})
.catch(() => {
$('.swh-loading').removeClass('show');
inSearch = false;
});
}
export function initOriginSearch() {
$(document).ready(() => {
if (typeof Storage !== 'undefined') {
originPatterns = sessionStorage.getItem('last-swh-origin-url-patterns');
let data = sessionStorage.getItem('last-swh-origin-search-results');
offset = sessionStorage.getItem('last-swh-origin-search-offset');
if (data) {
$('#origins-url-patterns').val(originPatterns);
offset = parseInt(offset);
populateOriginSearchResultsTable(JSON.parse(data), offset);
}
}
$('#search_origins').submit(event => {
let patterns = $('#origins-url-patterns').val();
offset = 0;
inSearch = true;
searchOrigins(patterns, limit, offset, offset);
event.preventDefault();
});
$('#origins-next-results-button').click(event => {
if ($('#origins-next-results-button').hasClass('disabled') || inSearch) {
return;
}
inSearch = true;
offset += perPage;
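// fetch a new batch when the incremented offset crosses a 'limit' boundary,
// otherwise the next page can be sliced from the batch already in memory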
if (!currentData || offset % limit === 0) {
searchOrigins(originPatterns, limit, offset, offset);
} else {
populateOriginSearchResultsTable(currentData, offset);
}
event.preventDefault();
});
$('#origins-prev-results-button').click(event => {
if ($('#origins-prev-results-button').hasClass('disabled') || inSearch) {
return;
}
inSearch = true;
offset -= perPage;
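// going backwards, a new batch is only needed when stepping back across a
// 'limit' boundary; its starting offset is (offset + perPage) - limit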
if (!currentData || (offset > 0 && (offset + perPage) % limit === 0)) {
searchOrigins(originPatterns, limit, (offset + perPage) - limit, offset);
} else {
populateOriginSearchResultsTable(currentData, offset);
}
event.preventDefault();
});
$(document).on('shown.bs.tab', 'a[data-toggle="tab"]', e => {
if (e.currentTarget.text.trim() === 'Search') {
fixTableRowsStyle();
}
});
});
}
diff --git a/swh/web/browse/utils.py b/swh/web/browse/utils.py
index 6a91cb96..ec26ad3a 100644
--- a/swh/web/browse/utils.py
+++ b/swh/web/browse/utils.py
@@ -1,998 +1,998 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import base64
import magic
import math
import pypandoc
import stat
from django.core.cache import cache
from django.utils.safestring import mark_safe
from importlib import reload
from swh.web.common import highlightjs, service
from swh.web.common.exc import NotFoundExc
from swh.web.common.utils import (
reverse, format_utc_iso_date, parse_timestamp,
get_origin_visits
)
from swh.web.config import get_config
def get_directory_entries(sha1_git):
"""Function that retrieves the content of a SWH directory
from the SWH archive.
The directory entries are first sorted in lexicographical order.
Sub-directories and regular files are then extracted.
Args:
sha1_git: sha1_git identifier of the directory
Returns:
A tuple whose first member corresponds to the sub-directories list
and second member the regular files list
Raises:
NotFoundExc if the directory is not found
"""
cache_entry_id = 'directory_entries_%s' % sha1_git
cache_entry = cache.get(cache_entry_id)
if cache_entry:
return cache_entry
entries = list(service.lookup_directory(sha1_git))
entries = sorted(entries, key=lambda e: e['name'])
for entry in entries:
entry['perms'] = stat.filemode(entry['perms'])
dirs = [e for e in entries if e['type'] == 'dir']
files = [e for e in entries if e['type'] == 'file']
cache.set(cache_entry_id, (dirs, files))
return dirs, files
def get_mimetype_and_encoding_for_content(content):
"""Function that returns the mime type and the encoding associated to
a content buffer using the magic module under the hood.
Args:
content (bytes): a content buffer
Returns:
A tuple (mimetype, encoding), for instance ('text/plain', 'us-ascii'),
associated to the provided content.
"""
while True:
try:
magic_result = magic.detect_from_content(content)
mime_type = magic_result.mime_type
encoding = magic_result.encoding
break
except Exception as exc:
# workaround an issue with the magic module which can fail
# if detect_from_content is called multiple times in
# a short amount of time
reload(magic)
return mime_type, encoding
# maximum authorized content size in bytes for HTML display
# with code highlighting
content_display_max_size = get_config()['content_display_max_size']
def request_content(query_string, max_size=content_display_max_size):
"""Function that retrieves a SWH content from the SWH archive.
Raw bytes content is first retrieved, then the content mime type.
If the mime type is not stored in the archive, it will be computed
using Python magic module.
Args:
query_string: a string of the form "[ALGO_HASH:]HASH" where
optional ALGO_HASH can be either *sha1*, *sha1_git*, *sha256*,
or *blake2s256* (default to *sha1*) and HASH the hexadecimal
representation of the hash value
max_size: the maximum size for a content to retrieve (default to 1MB,
no size limit if None)
Returns:
A tuple whose first member corresponds to the content raw bytes
and second member the content mime type
Raises:
NotFoundExc if the content is not found
"""
content_data = service.lookup_content(query_string)
filetype = None
language = None
license = None
# requests to the indexer db may fail so properly handle
# those cases in order to avoid content display errors
try:
filetype = service.lookup_content_filetype(query_string)
language = service.lookup_content_language(query_string)
license = service.lookup_content_license(query_string)
except Exception as e:
pass
mimetype = 'unknown'
encoding = 'unknown'
if filetype:
mimetype = filetype['mimetype']
encoding = filetype['encoding']
if not max_size or content_data['length'] < max_size:
content_raw = service.lookup_content_raw(query_string)
content_data['raw_data'] = content_raw['data']
if not filetype:
mimetype, encoding = \
get_mimetype_and_encoding_for_content(content_data['raw_data'])
# encode textual content to utf-8 if needed
if mimetype.startswith('text/'):
# probably a malformed UTF-8 content, reencode it
# by replacing invalid chars with a substitution one
if encoding == 'unknown-8bit':
content_data['raw_data'] = \
content_data['raw_data'].decode('utf-8', 'replace')\
.encode('utf-8')
elif 'ascii' not in encoding and encoding not in ['utf-8', 'binary']: # noqa
content_data['raw_data'] = \
content_data['raw_data'].decode(encoding, 'replace')\
.encode('utf-8')
else:
content_data['raw_data'] = None
content_data['mimetype'] = mimetype
content_data['encoding'] = encoding
if language:
content_data['language'] = language['lang']
else:
content_data['language'] = 'not detected'
if license:
content_data['licenses'] = ', '.join(license['licenses'])
else:
content_data['licenses'] = 'not detected'
return content_data
_browsers_supported_image_mimes = set(['image/gif', 'image/png',
'image/jpeg', 'image/bmp',
'image/webp'])
def prepare_content_for_display(content_data, mime_type, path):
"""Function that prepares a content for HTML display.
The function tries to associate a programming language to a
content in order to perform syntax highlighting client-side
using highlightjs. The language is determined using either
the content filename or its mime type.
If the mime type corresponds to an image format supported
by web browsers, the content will be encoded in base64
for displaying the image.
Args:
content_data (bytes): raw bytes of the content
mime_type (string): mime type of the content
path (string): path of the content including filename
Returns:
A dict containing the content bytes (possibly different from the one
provided as parameter if it is an image) under the key 'content_data
and the corresponding highlightjs language class under the
key 'language'.
"""
language = highlightjs.get_hljs_language_from_filename(path)
if not language:
language = highlightjs.get_hljs_language_from_mime_type(mime_type)
if not language:
language = 'nohighlight-swh'
elif mime_type.startswith('application/'):
mime_type = mime_type.replace('application/', 'text/')
if mime_type.startswith('image/'):
if mime_type in _browsers_supported_image_mimes:
content_data = base64.b64encode(content_data)
else:
content_data = None
return {'content_data': content_data,
'language': language}
def get_origin_visit(origin_info, visit_ts=None, visit_id=None,
snapshot_id=None):
"""Function that returns information about a SWH visit for
a given origin.
The visit is retrieved from a provided timestamp.
The closest visit from that timestamp is selected.
Args:
origin_info (dict): a dict filled with origin information
(id, url, type)
visit_ts (int or str): an ISO date string or Unix timestamp to parse
Returns:
A dict containing the visit info as described below::
{'origin': 2,
'date': '2017-10-08T11:54:25.582463+00:00',
'metadata': {},
'visit': 25,
'status': 'full'}
"""
visits = get_origin_visits(origin_info)
if not visits:
raise NotFoundExc('No SWH visit associated to origin with'
' type %s and url %s!' % (origin_info['type'],
origin_info['url']))
if snapshot_id:
visit = [v for v in visits if v['snapshot'] == snapshot_id]
if len(visit) == 0:
raise NotFoundExc(
'Visit for snapshot with id %s for origin with type %s'
' and url %s not found!' % (snapshot_id, origin_info['type'],
origin_info['url']))
return visit[0]
if visit_id:
visit = [v for v in visits if v['visit'] == int(visit_id)]
if len(visit) == 0:
raise NotFoundExc(
'Visit with id %s for origin with type %s'
' and url %s not found!' % (visit_id, origin_info['type'],
origin_info['url']))
return visit[0]
if not visit_ts:
# returns the latest full visit when no timestamp is provided
for v in reversed(visits):
if v['status'] == 'full':
return v
return visits[-1]
parsed_visit_ts = math.floor(parse_timestamp(visit_ts).timestamp())
visit_idx = None
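# walk the visit list (assumed sorted by date) and pick the visit whose
# date is closest to the requested timestamp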
for i, visit in enumerate(visits):
ts = math.floor(parse_timestamp(visit['date']).timestamp())
if i == 0 and parsed_visit_ts <= ts:
return visit
elif i == len(visits) - 1:
if parsed_visit_ts >= ts:
return visit
else:
next_ts = math.floor(
parse_timestamp(visits[i+1]['date']).timestamp())
if parsed_visit_ts >= ts and parsed_visit_ts < next_ts:
if (parsed_visit_ts - ts) < (next_ts - parsed_visit_ts):
visit_idx = i
break
else:
visit_idx = i+1
break
if visit_idx:
visit = visits[visit_idx]
while visit_idx < len(visits) - 1 and \
visit['date'] == visits[visit_idx+1]['date']:
visit_idx = visit_idx + 1
visit = visits[visit_idx]
return visit
else:
raise NotFoundExc(
'Visit with timestamp %s for origin with type %s and url %s not found!' % # noqa
(visit_ts, origin_info['type'], origin_info['url']))
def get_snapshot_content(snapshot_id):
"""Returns the lists of branches and releases
associated to a swh snapshot.
That list is put in cache in order to speed up the navigation
in the swh-web/browse ui.
Args:
snapshot_id (str): hexadecimal representation of the snapshot
identifier
Returns:
A tuple with two members. The first one is a list of dict describing
the snapshot branches. The second one is a list of dict describing the
snapshot releases.
Raises:
NotFoundExc if the snapshot does not exist
"""
cache_entry_id = 'swh_snapshot_%s' % snapshot_id
cache_entry = cache.get(cache_entry_id)
if cache_entry:
return cache_entry['branches'], cache_entry['releases']
branches = []
releases = []
if snapshot_id:
revision_ids = []
releases_ids = []
snapshot = service.lookup_snapshot(snapshot_id)
snapshot_branches = snapshot['branches']
for key in sorted(snapshot_branches.keys()):
if not snapshot_branches[key]:
continue
if snapshot_branches[key]['target_type'] == 'revision':
branches.append({'name': key,
'revision': snapshot_branches[key]['target']})
revision_ids.append(snapshot_branches[key]['target'])
elif snapshot_branches[key]['target_type'] == 'release':
releases_ids.append(snapshot_branches[key]['target'])
releases_info = service.lookup_release_multiple(releases_ids)
for release in releases_info:
releases.append({'name': release['name'],
'date': format_utc_iso_date(release['date']),
'id': release['id'],
'message': release['message'],
'target_type': release['target_type'],
'target': release['target']})
revision_ids.append(release['target'])
revisions = service.lookup_revision_multiple(revision_ids)
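# revision_ids holds the branch targets first, then the release targets,
# so indexes below len(branches) map to branches and the others to releases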
branches_to_remove = []
for idx, revision in enumerate(revisions):
if idx < len(branches):
if revision:
branches[idx]['directory'] = revision['directory']
branches[idx]['date'] = format_utc_iso_date(revision['date']) # noqa
branches[idx]['message'] = revision['message']
else:
branches_to_remove.append(branches[idx])
else:
rel_idx = idx - len(branches)
if revision:
releases[rel_idx]['directory'] = revision['directory']
for b in branches_to_remove:
branches.remove(b)
cache.set(cache_entry_id, {'branches': branches, 'releases': releases})
return branches, releases
def get_origin_visit_snapshot(origin_info, visit_ts=None, visit_id=None,
snapshot_id=None):
"""Returns the lists of branches and releases
associated to a swh origin for a given visit.
The visit is expressed by a timestamp; the closest visit to the
provided timestamp will be used.
If no visit parameter is provided, it returns the list of branches
found for the latest visit.
That list is put in cache in order to speed up the navigation
in the swh-web/browse ui.
Args:
origin_info (dict): a dict filled with origin information
(id, url, type)
visit_ts (int or str): an ISO date string or Unix timestamp to parse
visit_id (int): optional visit id for disambiguation in case
several visits have the same timestamp
Returns:
A tuple with two members. The first one is a list of dict describing
the origin branches for the given visit.
The second one is a list of dict describing the origin releases
for the given visit.
Raises:
NotFoundExc if the origin or its visit are not found
"""
visit_info = get_origin_visit(origin_info, visit_ts, visit_id, snapshot_id)
return get_snapshot_content(visit_info['snapshot'])
def gen_link(url, link_text, link_attrs={}):
"""
Utility function for generating an HTML link to insert
in Django templates.
Args:
url (str): an url
link_text (str): the text for the produced link
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="url">link_text</a>'
"""
attrs = ' '
for k, v in link_attrs.items():
attrs += '%s="%s" ' % (k, v)
link = '<a%shref="%s">%s</a>' % (attrs, url, link_text)
return mark_safe(link)
def gen_person_link(person_id, person_name, snapshot_context=None,
link_attrs={}):
"""
Utility function for generating a link to a SWH person HTML view
to insert in Django templates.
Args:
person_id (int): a SWH person id
person_name (str): the associated person name
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="person_url">person_name</a>'
"""
query_params = None
if snapshot_context and snapshot_context['origin_info']:
origin_info = snapshot_context['origin_info']
query_params = {'origin_type': origin_info['type'],
- 'origin_url': origin_info['url']}
+ 'origin': origin_info['url']}
if 'timestamp' in snapshot_context['url_args']:
query_params['timestamp'] = \
snapshot_context['url_args']['timestamp']
if 'visit_id' in snapshot_context['query_params']:
query_params['visit_id'] = \
snapshot_context['query_params']['visit_id']
elif snapshot_context:
query_params = {'snapshot_id': snapshot_context['snapshot_id']}
person_url = reverse('browse-person', kwargs={'person_id': person_id},
query_params=query_params)
return gen_link(person_url, person_name, link_attrs)
def gen_revision_link(revision_id, shorten_id=False, snapshot_context=None,
link_text=None, link_attrs={}):
"""
Utility function for generating a link to a SWH revision HTML view
to insert in Django templates.
Args:
revision_id (str): a SWH revision id
shorten_id (boolean): whether to shorten the revision id to 7
characters for the link text
snapshot_context (dict): if provided, generate snapshot-dependent
browsing link
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="revision_url">revision_id</a>'
"""
query_params = None
if snapshot_context and snapshot_context['origin_info']:
origin_info = snapshot_context['origin_info']
query_params = {'origin_type': origin_info['type'],
- 'origin_url': origin_info['url']}
+ 'origin': origin_info['url']}
if 'timestamp' in snapshot_context['url_args']:
query_params['timestamp'] = \
snapshot_context['url_args']['timestamp']
if 'visit_id' in snapshot_context['query_params']:
query_params['visit_id'] = \
snapshot_context['query_params']['visit_id']
elif snapshot_context:
query_params = {'snapshot_id': snapshot_context['snapshot_id']}
revision_url = reverse('browse-revision',
kwargs={'sha1_git': revision_id},
query_params=query_params)
if shorten_id:
return gen_link(revision_url, revision_id[:7], link_attrs)
else:
if not link_text:
link_text = revision_id
return gen_link(revision_url, link_text, link_attrs)
def gen_origin_link(origin_info, link_attrs={}):
"""
Utility function for generating a link to a SWH origin HTML view
to insert in Django templates.
Args:
origin_info (dict): a dict filled with origin information
(id, type, url)
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="origin_browse_url">Origin: origin_url</a>'
""" # noqa
origin_browse_url = reverse('browse-origin',
kwargs={'origin_type': origin_info['type'],
'origin_url': origin_info['url']})
return gen_link(origin_browse_url,
'Origin: ' + origin_info['url'], link_attrs)
def gen_directory_link(sha1_git, link_text=None, link_attrs={}):
"""
Utility function for generating a link to a SWH directory HTML view
to insert in Django templates.
Args:
sha1_git (str): directory identifier
link_text (str): optional text for the generated link
(the generated url will be used by default)
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="directory_url">link_text</a>'
"""
directory_url = reverse('browse-directory',
kwargs={'sha1_git': sha1_git})
if not link_text:
link_text = directory_url
return gen_link(directory_url, link_text, link_attrs)
def gen_snapshot_link(snapshot_id, link_text=None, link_attrs={}):
"""
Utility function for generating a link to a SWH snapshot HTML view
to insert in Django templates.
Args:
snapshot_id (str): snapshot identifier
link_text (str): optional text for the generated link
(the generated url will be used by default)
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="snapshot_url">link_text</a>'
"""
snapshot_url = reverse('browse-snapshot',
kwargs={'snapshot_id': snapshot_id})
if not link_text:
link_text = snapshot_url
return gen_link(snapshot_url, link_text, link_attrs)
def gen_snapshot_directory_link(snapshot_context, revision_id=None,
link_text=None, link_attrs={}):
"""
Utility function for generating a link to a SWH directory HTML view
in the context of a snapshot to insert in Django templates.
Args:
snapshot_context (dict): the snapshot information
revision_id (str): optional revision identifier in order
to use the associated directory
link_text (str): optional text to use for the generated link
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form
'<a href="origin_directory_view_url">origin_directory_view_url</a>'
"""
query_params = {'revision': revision_id}
if snapshot_context['origin_info']:
origin_info = snapshot_context['origin_info']
url_args = {'origin_type': origin_info['type'],
'origin_url': origin_info['url']}
if 'timestamp' in snapshot_context['url_args']:
url_args['timestamp'] = \
snapshot_context['url_args']['timestamp']
if 'visit_id' in snapshot_context['query_params']:
query_params['visit_id'] = \
snapshot_context['query_params']['visit_id']
directory_url = reverse('browse-origin-directory',
kwargs=url_args,
query_params=query_params)
else:
url_args = {'snapshot_id': snapshot_context['snapshot_id']}
directory_url = reverse('browse-snapshot-directory',
kwargs=url_args,
query_params=query_params)
if not link_text:
link_text = directory_url
return gen_link(directory_url, link_text, link_attrs)
def gen_content_link(sha1_git, link_text=None, link_attrs={}):
"""
Utility function for generating a link to a SWH content HTML view
to insert in Django templates.
Args:
sha1_git (str): content identifier
link_text (str): optional text for the generated link
(the generated url will be used by default)
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="content_url">link_text</a>'
"""
content_url = reverse('browse-content',
kwargs={'query_string': 'sha1_git:' + sha1_git})
if not link_text:
link_text = content_url
return gen_link(content_url, link_text, link_attrs)
def get_revision_log_url(revision_id, snapshot_context=None):
"""
Utility function for getting the URL for a SWH revision log HTML view
(possibly in the context of an origin).
Args:
revision_id (str): revision identifier the history heads to
snapshot_context (dict): if provided, generate snapshot-dependent
browsing link
Returns:
The SWH revision log view URL
"""
query_params = {'revision': revision_id}
if snapshot_context and snapshot_context['origin_info']:
origin_info = snapshot_context['origin_info']
url_args = {'origin_type': origin_info['type'],
'origin_url': origin_info['url']}
if 'timestamp' in snapshot_context['url_args']:
url_args['timestamp'] = \
snapshot_context['url_args']['timestamp']
if 'visit_id' in snapshot_context['query_params']:
query_params['visit_id'] = \
snapshot_context['query_params']['visit_id']
revision_log_url = reverse('browse-origin-log',
kwargs=url_args,
query_params=query_params)
elif snapshot_context:
url_args = {'snapshot_id': snapshot_context['snapshot_id']}
revision_log_url = reverse('browse-snapshot-log',
kwargs=url_args,
query_params=query_params)
else:
revision_log_url = reverse('browse-revision-log',
kwargs={'sha1_git': revision_id})
return revision_log_url
def gen_revision_log_link(revision_id, snapshot_context=None, link_text=None,
link_attrs={}):
"""
Utility function for generating a link to a SWH revision log HTML view
(possibly in the context of an origin) to insert in Django templates.
Args:
revision_id (str): revision identifier the history heads to
snapshot_context (dict): if provided, generate snapshot-dependent
browsing link
link_text (str): optional text to use for the generated link
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form
'<a href="revision_log_url">link_text</a>'
"""
revision_log_url = get_revision_log_url(revision_id, snapshot_context)
if not link_text:
link_text = revision_log_url
return gen_link(revision_log_url, link_text, link_attrs)
def _format_log_entries(revision_log, per_page, snapshot_context=None):
revision_log_data = []
for i, log in enumerate(revision_log):
if i == per_page:
break
revision_log_data.append(
{'author': gen_person_link(log['author']['id'],
log['author']['name'],
snapshot_context),
'revision': gen_revision_link(log['id'], True, snapshot_context),
'message': log['message'],
'date': format_utc_iso_date(log['date']),
'directory': log['directory']})
return revision_log_data
def prepare_revision_log_for_display(revision_log, per_page, revs_breadcrumb,
snapshot_context=None):
"""
Utility function that processes raw revision log data for HTML display.
Its purpose is to:
* add links to relevant SWH browse views
* format date in human readable format
* truncate the message log
It also computes the data needed to generate the links for navigating back
and forth in the history log.
Args:
revision_log (list): raw revision log as returned by the SWH web api
per_page (int): number of log entries per page
revs_breadcrumb (str): breadcrumbs of revisions navigated so far,
in the form 'rev1[/rev2/../revN]'. Each revision corresponds to
the first one displayed in the HTML view for history log.
snapshot_context (dict): if provided, generate snapshot-dependent
browsing link
"""
current_rev = revision_log[0]['id']
next_rev = None
prev_rev = None
next_revs_breadcrumb = None
prev_revs_breadcrumb = None
if len(revision_log) == per_page + 1:
prev_rev = revision_log[-1]['id']
prev_rev_bc = current_rev
if snapshot_context:
prev_rev_bc = prev_rev
if revs_breadcrumb:
revs = revs_breadcrumb.split('/')
next_rev = revs[-1]
if len(revs) > 1:
next_revs_breadcrumb = '/'.join(revs[:-1])
if len(revision_log) == per_page + 1:
prev_revs_breadcrumb = revs_breadcrumb + '/' + prev_rev_bc
else:
prev_revs_breadcrumb = prev_rev_bc
return {'revision_log_data': _format_log_entries(revision_log, per_page,
snapshot_context),
'prev_rev': prev_rev,
'prev_revs_breadcrumb': prev_revs_breadcrumb,
'next_rev': next_rev,
'next_revs_breadcrumb': next_revs_breadcrumb}
# list of origin types that can be found in the swh archive
# TODO: retrieve it dynamically in an efficient way instead
# of hardcoding it
_swh_origin_types = ['git', 'svn', 'deb', 'hg', 'ftp', 'deposit']
def get_origin_info(origin_url, origin_type=None):
"""
Get info about a SWH origin.
Its main purpose is to automatically find an origin type
when it is not provided as parameter.
Args:
origin_url (str): complete url of a SWH origin
origin_type (str): optional origin type
Returns:
A dict with the following entries:
* type: the origin type
* url: the origin url
* id: the SWH internal id of the origin
"""
if origin_type:
return service.lookup_origin({'type': origin_type,
'url': origin_url})
else:
for origin_type in _swh_origin_types:
try:
origin_info = service.lookup_origin({'type': origin_type,
'url': origin_url})
return origin_info
except Exception:
pass
return None
def get_snapshot_context(snapshot_id=None, origin_type=None, origin_url=None,
timestamp=None, visit_id=None):
"""
Utility function to compute relevant information when navigating
the SWH archive in a snapshot context. The snapshot is either
referenced by its id or it will be retrieved from an origin visit.
Args:
snapshot_id (str): hexadecimal representation of a snapshot identifier,
all other parameters will be ignored if it is provided
origin_type (str): the origin type (git, svn, deposit, ...)
origin_url (str): the origin_url (e.g. https://github.com/(user)/(repo)/)
timestamp (str): a datetime string for retrieving the closest
SWH visit of the origin
visit_id (int): optional visit id for disambiguation in case
of several visits with the same timestamp
Returns:
A dict with the following entries:
* origin_info: dict containing origin information
* visit_info: dict containing SWH visit information
* branches: the list of branches for the origin found
during the visit
* releases: the list of releases for the origin found
during the visit
* origin_browse_url: the url to browse the origin
* origin_branches_url: the url to browse the origin branches
* origin_releases_url': the url to browse the origin releases
* origin_visit_url: the url to browse the snapshot of the origin
found during the visit
* url_args: dict containing url arguments to use when browsing in
the context of the origin and its visit
""" # noqa
origin_info = None
visit_info = None
url_args = None
query_params = {}
branches = []
releases = []
browse_url = None
visit_url = None
branches_url = None
releases_url = None
swh_type = 'snapshot'
if origin_url:
swh_type = 'origin'
origin_info = get_origin_info(origin_url, origin_type)
origin_info['type'] = origin_type
visit_info = get_origin_visit(origin_info, timestamp, visit_id,
snapshot_id)
visit_info['fmt_date'] = format_utc_iso_date(visit_info['date'])
snapshot_id = visit_info['snapshot']
# the provided timestamp is not necessarily equal to the one
# of the retrieved visit, so get the exact one in order
# to use it in the urls generated below
if timestamp:
timestamp = visit_info['date']
branches, releases = \
get_origin_visit_snapshot(origin_info, timestamp, visit_id,
snapshot_id)
url_args = {'origin_type': origin_info['type'],
'origin_url': origin_info['url']}
query_params = {'visit_id': visit_id}
browse_url = reverse('browse-origin-visits',
kwargs=url_args)
if timestamp:
url_args['timestamp'] = format_utc_iso_date(timestamp,
'%Y-%m-%dT%H:%M:%S')
visit_url = reverse('browse-origin-directory',
kwargs=url_args,
query_params=query_params)
visit_info['url'] = visit_url
branches_url = reverse('browse-origin-branches',
kwargs=url_args,
query_params=query_params)
releases_url = reverse('browse-origin-releases',
kwargs=url_args,
query_params=query_params)
elif snapshot_id:
branches, releases = get_snapshot_content(snapshot_id)
url_args = {'snapshot_id': snapshot_id}
browse_url = reverse('browse-snapshot',
kwargs=url_args)
branches_url = reverse('browse-snapshot-branches',
kwargs=url_args)
releases_url = reverse('browse-snapshot-releases',
kwargs=url_args)
releases = list(reversed(releases))
return {
'swh_type': swh_type,
'snapshot_id': snapshot_id,
'origin_info': origin_info,
'visit_info': visit_info,
'branches': branches,
'releases': releases,
'branch': None,
'release': None,
'browse_url': browse_url,
'branches_url': branches_url,
'releases_url': releases_url,
'url_args': url_args,
'query_params': query_params
}
# list of common readme names ordered by preference
# (lower indices have higher priority)
_common_readme_names = [
"readme.markdown",
"readme.md",
"readme.rst",
"readme.txt",
"readme"
]
def get_readme_to_display(readmes):
"""
Process a list of readme files found in a directory
in order to find the adequate one to display.
Args:
readmes: a list of dict where keys are readme file names and values
are readme sha1s
Returns:
A tuple (readme_name, readme_sha1)
"""
readme_name = None
readme_url = None
readme_sha1 = None
readme_html = None
lc_readmes = {k.lower(): {'orig_name': k, 'sha1': v}
for k, v in readmes.items()}
# look for readme names according to the preference order
# defined by the _common_readme_names list
for common_readme_name in _common_readme_names:
if common_readme_name in lc_readmes:
readme_name = lc_readmes[common_readme_name]['orig_name']
readme_sha1 = lc_readmes[common_readme_name]['sha1']
readme_url = reverse('browse-content-raw',
kwargs={'query_string': readme_sha1})
break
# otherwise pick the first readme like file if any
if not readme_name and len(readmes.items()) > 0:
readme_name = next(iter(readmes))
readme_sha1 = readmes[readme_name]
readme_url = reverse('browse-content-raw',
kwargs={'query_string': readme_sha1})
# convert rst README to html server side as there is
# no viable solution to perform that task client side
if readme_name and readme_name.endswith('.rst'):
cache_entry_id = 'readme_%s' % readme_sha1
cache_entry = cache.get(cache_entry_id)
if cache_entry:
readme_html = cache_entry
else:
rst_doc = request_content(readme_sha1)
readme_html = pypandoc.convert_text(rst_doc['raw_data'], 'html',
format='rst')
cache.set(cache_entry_id, readme_html)
return readme_name, readme_url, readme_html
diff --git a/swh/web/browse/views/revision.py b/swh/web/browse/views/revision.py
index 5873024c..f313e18e 100644
--- a/swh/web/browse/views/revision.py
+++ b/swh/web/browse/views/revision.py
@@ -1,506 +1,506 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import hashlib
import json
from django.http import HttpResponse
from django.shortcuts import render, redirect
from django.template.defaultfilters import filesizeformat
from django.utils.safestring import mark_safe
from swh.web.common import service
from swh.web.common.utils import (
reverse, format_utc_iso_date, gen_path_info,
get_swh_persistent_id
)
from swh.web.common.exc import handle_view_exception
from swh.web.browse.browseurls import browse_route
from swh.web.browse.utils import (
gen_link, gen_person_link, gen_revision_link,
prepare_revision_log_for_display,
get_snapshot_context, gen_snapshot_directory_link,
get_revision_log_url, get_directory_entries,
gen_directory_link, request_content, prepare_content_for_display,
content_display_max_size, gen_snapshot_link, get_readme_to_display
)
def _gen_content_url(revision, query_string, path, snapshot_context):
if snapshot_context:
url_args = snapshot_context['url_args']
url_args['path'] = path
query_params = snapshot_context['query_params']
query_params['revision'] = revision['id']
content_url = reverse('browse-origin-content',
kwargs=url_args,
query_params=query_params)
else:
content_path = '%s/%s' % (revision['directory'], path)
content_url = reverse('browse-content',
kwargs={'query_string': query_string},
query_params={'path': content_path})
return content_url
def _gen_diff_link(idx, diff_anchor, link_text):
if idx < _max_displayed_file_diffs:
return gen_link(diff_anchor, link_text)
else:
return link_text
# TODO: put in conf
_max_displayed_file_diffs = 1000
def _gen_revision_changes_list(revision, changes, snapshot_context):
"""
Returns an HTML string describing the file changes
introduced in a revision.
As this string will be displayed in the browse revision view,
links to adequate file diffs are also generated.
Args:
revision (str): hexadecimal representation of a revision identifier
changes (list): list of file changes in the revision
snapshot_context (dict): optional origin context used to reverse
the content urls
Returns:
A string to insert in a revision HTML view.
"""
changes_msg = []
for i, change in enumerate(changes):
hasher = hashlib.sha1()
from_query_string = ''
to_query_string = ''
diff_id = 'diff-'
if change['from']:
from_query_string = 'sha1_git:' + change['from']['target']
diff_id += change['from']['target'] + '-' + change['from_path']
diff_id += '-'
if change['to']:
to_query_string = 'sha1_git:' + change['to']['target']
diff_id += change['to']['target'] + change['to_path']
change['path'] = change['to_path'] or change['from_path']
url_args = {'from_query_string': from_query_string,
'to_query_string': to_query_string}
query_params = {'path': change['path']}
change['diff_url'] = reverse('diff-contents',
kwargs=url_args,
query_params=query_params)
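# hash the concatenated from/to targets and paths to get a stable identifier
# that can safely be used as an HTML anchor for this file diff panel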
hasher.update(diff_id.encode('utf-8'))
diff_id = hasher.hexdigest()
change['id'] = diff_id
panel_diff_link = '#panel_' + diff_id
if change['type'] == 'modify':
change['content_url'] = \
_gen_content_url(revision, to_query_string,
change['to_path'], snapshot_context)
changes_msg.append('modified: %s' %
_gen_diff_link(i, panel_diff_link,
change['to_path']))
elif change['type'] == 'insert':
change['content_url'] = \
_gen_content_url(revision, to_query_string,
change['to_path'], snapshot_context)
changes_msg.append('new file: %s' %
_gen_diff_link(i, panel_diff_link,
change['to_path']))
elif change['type'] == 'delete':
parent = service.lookup_revision(revision['parents'][0])
change['content_url'] = \
_gen_content_url(parent,
from_query_string,
change['from_path'], snapshot_context)
changes_msg.append('deleted: %s' %
_gen_diff_link(i, panel_diff_link,
change['from_path']))
elif change['type'] == 'rename':
change['content_url'] = \
_gen_content_url(revision, to_query_string,
change['to_path'], snapshot_context)
link_text = change['from_path'] + ' → ' + change['to_path']
changes_msg.append('renamed: %s' %
_gen_diff_link(i, panel_diff_link, link_text))
if not changes:
changes_msg.append('No changes')
return mark_safe('\n'.join(changes_msg))
@browse_route(r'revision/(?P<sha1_git>[0-9a-f]+)/diff/',
view_name='diff-revision')
def _revision_diff(request, sha1_git):
"""
Browse internal endpoint to compute revision diff
"""
try:
revision = service.lookup_revision(sha1_git)
snapshot_context = None
origin_type = request.GET.get('origin_type', None)
origin_url = request.GET.get('origin_url', None)
if not origin_url:
origin_url = request.GET.get('origin', None)
timestamp = request.GET.get('timestamp', None)
visit_id = request.GET.get('visit_id', None)
if origin_url:
snapshot_context = get_snapshot_context(None, origin_type,
origin_url,
timestamp, visit_id)
except Exception as exc:
return handle_view_exception(request, exc)
changes = service.diff_revision(sha1_git)
changes_msg = _gen_revision_changes_list(revision, changes,
snapshot_context)
diff_data = {
'total_nb_changes': len(changes),
'changes': changes[:_max_displayed_file_diffs],
'changes_msg': changes_msg
}
diff_data_json = json.dumps(diff_data, separators=(',', ': '))
return HttpResponse(diff_data_json, content_type='application/json')
NB_LOG_ENTRIES = 20
@browse_route(r'revision/(?P<sha1_git>[0-9a-f]+)/log/',
view_name='browse-revision-log')
def revision_log_browse(request, sha1_git):
"""
Django view that produces an HTML display of the history
log for a SWH revision identified by its id.
The url that points to it is :http:get:`/browse/revision/(sha1_git)/log/`.
""" # noqa
try:
per_page = int(request.GET.get('per_page', NB_LOG_ENTRIES))
revision_log = service.lookup_revision_log(sha1_git,
limit=per_page+1)
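# one extra entry is fetched so that prepare_revision_log_for_display can
# tell whether an older page exists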
revision_log = list(revision_log)
except Exception as exc:
return handle_view_exception(request, exc)
revs_breadcrumb = request.GET.get('revs_breadcrumb', None)
revision_log_display_data = prepare_revision_log_for_display(
revision_log, per_page, revs_breadcrumb)
prev_rev = revision_log_display_data['prev_rev']
prev_revs_breadcrumb = revision_log_display_data['prev_revs_breadcrumb']
prev_log_url = None
if prev_rev:
prev_log_url = \
reverse('browse-revision-log',
kwargs={'sha1_git': prev_rev},
query_params={'revs_breadcrumb': prev_revs_breadcrumb,
'per_page': per_page})
next_rev = revision_log_display_data['next_rev']
next_revs_breadcrumb = revision_log_display_data['next_revs_breadcrumb']
next_log_url = None
if next_rev:
next_log_url = \
reverse('browse-revision-log',
kwargs={'sha1_git': next_rev},
query_params={'revs_breadcrumb': next_revs_breadcrumb,
'per_page': per_page})
revision_log_data = revision_log_display_data['revision_log_data']
for log in revision_log_data:
log['directory'] = gen_directory_link(
log['directory'],
link_text=''
'Browse files',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
return render(request, 'revision-log.html',
{'heading': 'Revision history',
'swh_object_name': 'Revision history',
'swh_object_metadata': None,
'revision_log': revision_log_data,
'next_log_url': next_log_url,
'prev_log_url': prev_log_url,
'breadcrumbs': None,
'top_right_link': None,
'top_right_link_text': None,
'snapshot_context': None,
'vault_cooking': None,
'show_actions_menu': True,
'swh_ids': None})
@browse_route(r'revision/(?P<sha1_git>[0-9a-f]+)/',
r'revision/(?P<sha1_git>[0-9a-f]+)/(?P<extra_path>.+)/',
view_name='browse-revision')
def revision_browse(request, sha1_git, extra_path=None):
"""
Django view that produces an HTML display of a SWH revision
identified by its id.
The url that points to it is :http:get:`/browse/revision/(sha1_git)/`.
"""
try:
revision = service.lookup_revision(sha1_git)
# some readme files can reference assets reachable from the
# browsed directory, handle that special case in order to
# correctly display them
if extra_path:
dir_info = \
service.lookup_directory_with_path(revision['directory'],
extra_path)
if dir_info and dir_info['type'] == 'file':
file_raw_url = reverse(
'browse-content-raw',
kwargs={'query_string': dir_info['checksums']['sha1']})
return redirect(file_raw_url)
origin_info = None
snapshot_context = None
origin_type = request.GET.get('origin_type', None)
origin_url = request.GET.get('origin_url', None)
if not origin_url:
origin_url = request.GET.get('origin', None)
timestamp = request.GET.get('timestamp', None)
visit_id = request.GET.get('visit_id', None)
snapshot_id = request.GET.get('snapshot_id', None)
path = request.GET.get('path', None)
dir_id = None
dirs, files = None, None
content_data = None
if origin_url:
snapshot_context = get_snapshot_context(None, origin_type,
origin_url,
timestamp, visit_id)
origin_info = snapshot_context['origin_info']
snapshot_id = snapshot_context['snapshot_id']
elif snapshot_id:
snapshot_context = get_snapshot_context(snapshot_id)
if path:
path_info = \
service.lookup_directory_with_path(revision['directory'], path)
if path_info['type'] == 'dir':
dir_id = path_info['target']
else:
query_string = 'sha1_git:' + path_info['target']
content_data = request_content(query_string)
else:
dir_id = revision['directory']
if dir_id:
path = '' if path is None else (path + '/')
dirs, files = get_directory_entries(dir_id)
except Exception as exc:
return handle_view_exception(request, exc)
revision_data = {}
revision_data['author'] = \
gen_person_link(revision['author']['id'], revision['author']['name'],
snapshot_context)
revision_data['committer'] = \
gen_person_link(revision['committer']['id'],
revision['committer']['name'], snapshot_context)
revision_data['committer date'] = format_utc_iso_date(
revision['committer_date'])
revision_data['date'] = format_utc_iso_date(revision['date'])
if snapshot_context:
revision_data['snapshot id'] = snapshot_id
revision_data['directory'] = \
gen_snapshot_directory_link(snapshot_context, sha1_git,
link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm', # noqa
'role': 'button'})
else:
revision_data['directory'] = \
gen_directory_link(revision['directory'], link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
revision_data['id'] = sha1_git
revision_data['merge'] = revision['merge']
revision_data['metadata'] = json.dumps(revision['metadata'],
sort_keys=True,
indent=4, separators=(',', ': '))
if origin_info:
revision_data['context-independent revision'] = \
gen_revision_link(sha1_git, link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
revision_data['origin id'] = origin_info['id']
revision_data['origin type'] = origin_info['type']
revision_data['origin url'] = gen_link(origin_info['url'],
origin_info['url'])
browse_snapshot_link = \
gen_snapshot_link(snapshot_id, link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
revision_data['snapshot'] = browse_snapshot_link
parents = ''
for p in revision['parents']:
parent_link = gen_revision_link(p, snapshot_context=snapshot_context)
parents += parent_link + '<br/>'
revision_data['parents'] = mark_safe(parents)
revision_data['synthetic'] = revision['synthetic']
revision_data['type'] = revision['type']
message_lines = revision['message'].split('\n')
parents_links = '%s parent%s ' % \
(len(revision['parents']),
'' if len(revision['parents']) == 1 else 's')
parents_links += ' '
for p in revision['parents']:
parent_link = gen_revision_link(p, shorten_id=True,
snapshot_context=snapshot_context)
parents_links += parent_link
if p != revision['parents'][-1]:
parents_links += ' + '
path_info = gen_path_info(path)
query_params = {'snapshot_id': snapshot_id,
'origin_type': origin_type,
- 'origin_url': origin_url,
+ 'origin': origin_url,
'timestamp': timestamp,
'visit_id': visit_id}
breadcrumbs = []
breadcrumbs.append({'name': revision['directory'][:7],
'url': reverse('browse-revision',
kwargs={'sha1_git': sha1_git},
query_params=query_params)})
for pi in path_info:
query_params['path'] = pi['path']
breadcrumbs.append({'name': pi['name'],
'url': reverse('browse-revision',
kwargs={'sha1_git': sha1_git},
query_params=query_params)})
vault_cooking = {
'directory_context': False,
'directory_id': None,
'revision_context': True,
'revision_id': sha1_git
}
content = None
content_size = None
mimetype = None
language = None
readme_name = None
readme_url = None
readme_html = None
readmes = {}
if content_data:
breadcrumbs[-1]['url'] = None
content_size = content_data['length']
mimetype = content_data['mimetype']
if content_data['raw_data']:
content_display_data = prepare_content_for_display(
content_data['raw_data'], content_data['mimetype'], path)
content = content_display_data['content_data']
language = content_display_data['language']
query_params = {}
if path:
query_params['filename'] = path_info[-1]['name']
top_right_link = reverse('browse-content-raw',
kwargs={'query_string': query_string},
query_params=query_params)
top_right_link_text = mark_safe(
''
'Raw File')
else:
for d in dirs:
query_params['path'] = path + d['name']
d['url'] = reverse('browse-revision',
kwargs={'sha1_git': sha1_git},
query_params=query_params)
for f in files:
query_params['path'] = path + f['name']
f['url'] = reverse('browse-revision',
kwargs={'sha1_git': sha1_git},
query_params=query_params)
f['length'] = filesizeformat(f['length'])
if f['name'].lower().startswith('readme'):
readmes[f['name']] = f['checksums']['sha1']
readme_name, readme_url, readme_html = get_readme_to_display(readmes)
top_right_link = get_revision_log_url(sha1_git, snapshot_context)
top_right_link_text = mark_safe(
''
'History')
vault_cooking['directory_context'] = True
vault_cooking['directory_id'] = dir_id
diff_revision_url = reverse('diff-revision', kwargs={'sha1_git': sha1_git},
query_params={'origin_type': origin_type,
- 'origin_url': origin_url,
+ 'origin': origin_url,
'timestamp': timestamp,
'visit_id': visit_id})
swh_rev_id = get_swh_persistent_id('revision', sha1_git)
show_ids_options = snapshot_context and \
snapshot_context['origin_info'] is not None
swh_ids = [
{
'object_type': 'revision',
'title': 'Revision ' + sha1_git,
'swh_id': swh_rev_id,
'swh_id_url': reverse('browse-swh-id',
kwargs={'swh_id': swh_rev_id}),
'show_options': show_ids_options
}
]
if snapshot_id:
swh_snp_id = get_swh_persistent_id('snapshot', snapshot_id)
swh_ids.append({
'object_type': 'snapshot',
'title': 'Snapshot ' + snapshot_id,
'swh_id': swh_snp_id,
'swh_id_url': reverse('browse-swh-id',
kwargs={'swh_id': swh_snp_id}),
'show_options': show_ids_options
})
return render(request, 'revision.html',
{'heading': 'Revision',
'swh_object_name': 'Revision',
'swh_object_metadata': revision_data,
'message_header': message_lines[0],
'message_body': '\n'.join(message_lines[1:]),
'parents_links': mark_safe(parents_links),
'snapshot_context': snapshot_context,
'dirs': dirs,
'files': files,
'content': content,
'content_size': content_size,
'max_content_size': content_display_max_size,
'mimetype': mimetype,
'language': language,
'readme_name': readme_name,
'readme_url': readme_url,
'readme_html': readme_html,
'breadcrumbs': breadcrumbs,
'top_right_link': top_right_link,
'top_right_link_text': top_right_link_text,
'vault_cooking': vault_cooking,
'diff_revision_url': diff_revision_url,
'show_actions_menu': True,
'swh_ids': swh_ids})
diff --git a/swh/web/browse/views/utils/snapshot_context.py b/swh/web/browse/views/utils/snapshot_context.py
index 69f2e9bc..b8f97ae3 100644
--- a/swh/web/browse/views/utils/snapshot_context.py
+++ b/swh/web/browse/views/utils/snapshot_context.py
@@ -1,812 +1,812 @@
# Copyright (C) 2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
# Utility module implementing Django views for browsing the SWH archive
# in a snapshot context.
# Its purpose is to factorize code for the views reachable from the
# /origin/.* and /snapshot/.* endpoints.
from django.shortcuts import render, redirect
from django.utils.safestring import mark_safe
from django.template.defaultfilters import filesizeformat
from swh.web.browse.utils import (
get_snapshot_context, get_directory_entries, gen_directory_link,
gen_revision_link, request_content, gen_content_link,
prepare_content_for_display, content_display_max_size,
prepare_revision_log_for_display, gen_snapshot_directory_link,
gen_revision_log_link, gen_link, get_readme_to_display
)
from swh.web.common import service
from swh.web.common.exc import (
handle_view_exception, NotFoundExc
)
from swh.web.common.utils import (
reverse, gen_path_info, format_utc_iso_date,
get_swh_persistent_id
)
def _get_branch(branches, branch_name):
"""
Utility function to get a specific branch from a branches list.
Its purpose is to get the default HEAD branch as some SWH origins
(e.g. those with svn type) do not have it. In that case, check
whether there is a master branch instead and return it.
"""
filtered_branches = \
[b for b in branches if b['name'].endswith(branch_name)]
if len(filtered_branches) > 0:
return filtered_branches[0]
elif branch_name == 'HEAD':
filtered_branches = \
[b for b in branches if b['name'].endswith('master')]
if len(filtered_branches) > 0:
return filtered_branches[0]
elif len(branches) > 0:
return branches[0]
return None
def _get_release(releases, release_name):
"""
Utility function to get a specific release from a releases list.
Returns None if the release can not be found in the list.
"""
filtered_releases = \
[r for r in releases if r['name'] == release_name]
if len(filtered_releases) > 0:
return filtered_releases[0]
else:
return None
def _branch_not_found(branch_type, branch, branches, snapshot_id=None,
origin_info=None, timestamp=None, visit_id=None):
"""
Utility function to raise an exception when a specified branch/release
can not be found.
"""
if branch_type == 'branch':
branch_type = 'Branch'
branch_type_plural = 'branches'
else:
branch_type = 'Release'
branch_type_plural = 'releases'
if snapshot_id and len(branches) == 0:
msg = 'Snapshot with id %s has an empty list' \
' of %s!' % (snapshot_id, branch_type_plural)
elif snapshot_id:
msg = '%s %s for snapshot with id %s' \
' not found!' % (branch_type, branch, snapshot_id)
elif visit_id and len(branches) == 0:
msg = 'Origin with type %s and url %s' \
' for visit with id %s has an empty list' \
' of %s!' % (origin_info['type'], origin_info['url'], visit_id,
branch_type_plural)
elif visit_id:
msg = '%s %s associated to visit with' \
' id %s for origin with type %s and url %s' \
' not found!' % (branch_type, branch, visit_id,
origin_info['type'], origin_info['url'])
elif len(branches) == 0:
msg = 'Origin with type %s and url %s' \
' for visit with timestamp %s has an empty list' \
' of %s!' % (origin_info['type'], origin_info['url'],
timestamp, branch_type_plural)
else:
msg = '%s %s associated to visit with' \
' timestamp %s for origin with type %s' \
' and url %s not found!' % (branch_type, branch, timestamp,
origin_info['type'],
origin_info['url'])
raise NotFoundExc(msg)
def _process_snapshot_request(request, snapshot_id=None, origin_type=None,
origin_url=None, timestamp=None, path=None,
browse_context='directory'):
"""
Utility function to perform common input request processing
for snapshot context views.
"""
visit_id = request.GET.get('visit_id', None)
snapshot_context = get_snapshot_context(snapshot_id, origin_type,
origin_url, timestamp, visit_id)
swh_type = snapshot_context['swh_type']
origin_info = snapshot_context['origin_info']
branches = snapshot_context['branches']
releases = snapshot_context['releases']
url_args = snapshot_context['url_args']
query_params = snapshot_context['query_params']
browse_view_name = 'browse-' + swh_type + '-' + browse_context
for b in branches:
branch_url_args = dict(url_args)
branch_query_params = dict(query_params)
branch_query_params['branch'] = b['name']
if path:
b['path'] = path
branch_url_args['path'] = path
b['url'] = reverse(browse_view_name,
kwargs=branch_url_args,
query_params=branch_query_params)
for r in releases:
release_url_args = dict(url_args)
release_query_params = dict(query_params)
release_query_params['release'] = r['name']
if path:
r['path'] = path
release_url_args['path'] = path
r['url'] = reverse(browse_view_name,
kwargs=release_url_args,
query_params=release_query_params)
root_sha1_git = None
revision_id = request.GET.get('revision', None)
release_name = request.GET.get('release', None)
branch_name = None
if revision_id:
revision = service.lookup_revision(revision_id)
root_sha1_git = revision['directory']
branches.append({'name': revision_id,
'revision': revision_id,
'directory': root_sha1_git,
'url': None})
branch_name = revision_id
query_params['revision'] = revision_id
elif release_name:
release = _get_release(releases, release_name)
if release:
root_sha1_git = release['directory']
revision_id = release['target']
query_params['release'] = release_name
else:
_branch_not_found("release", release_name, releases, snapshot_id,
origin_info, timestamp, visit_id)
else:
branch_name = request.GET.get('branch', None)
if branch_name:
query_params['branch'] = branch_name
branch = _get_branch(branches, branch_name or 'HEAD')
if branch:
branch_name = branch['name']
root_sha1_git = branch['directory']
revision_id = branch['revision']
else:
_branch_not_found("branch", branch_name, branches, snapshot_id,
origin_info, timestamp, visit_id)
snapshot_context['query_params'] = query_params
snapshot_context['root_sha1_git'] = root_sha1_git
snapshot_context['revision_id'] = revision_id
snapshot_context['branch'] = branch_name
snapshot_context['release'] = release_name
return snapshot_context
def browse_snapshot_directory(request, snapshot_id=None, origin_type=None,
origin_url=None, timestamp=None, path=None):
"""
Django view implementation for browsing a directory in a snapshot context.
"""
try:
snapshot_context = _process_snapshot_request(request, snapshot_id,
origin_type, origin_url,
timestamp, path,
browse_context='directory') # noqa
root_sha1_git = snapshot_context['root_sha1_git']
sha1_git = root_sha1_git
if path:
dir_info = service.lookup_directory_with_path(root_sha1_git, path)
# some readme files can reference assets reachable from the
# browsed directory, handle that special case in order to
# correctly display them
if dir_info and dir_info['type'] == 'file':
file_raw_url = reverse(
'browse-content-raw',
kwargs={'query_string': dir_info['checksums']['sha1']})
return redirect(file_raw_url)
sha1_git = dir_info['target']
dirs, files = get_directory_entries(sha1_git)
except Exception as exc:
return handle_view_exception(request, exc)
swh_type = snapshot_context['swh_type']
origin_info = snapshot_context['origin_info']
visit_info = snapshot_context['visit_info']
url_args = snapshot_context['url_args']
query_params = snapshot_context['query_params']
revision_id = snapshot_context['revision_id']
snapshot_id = snapshot_context['snapshot_id']
path_info = gen_path_info(path)
browse_view_name = 'browse-' + swh_type + '-directory'
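# Build the breadcrumb navigation from the root directory down to the
# currently browsed path.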
breadcrumbs = []
breadcrumbs.append({'name': root_sha1_git[:7],
'url': reverse(browse_view_name,
kwargs=url_args,
query_params=query_params)})
for pi in path_info:
bc_url_args = dict(url_args)
bc_url_args['path'] = pi['path']
breadcrumbs.append({'name': pi['name'],
'url': reverse(browse_view_name,
kwargs=bc_url_args,
query_params=query_params)})
path = '' if path is None else (path + '/')
for d in dirs:
bc_url_args = dict(url_args)
bc_url_args['path'] = path + d['name']
d['url'] = reverse(browse_view_name,
kwargs=bc_url_args,
query_params=query_params)
sum_file_sizes = 0
readmes = {}
browse_view_name = 'browse-' + swh_type + '-content'
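# Generate a browsing URL for each file, accumulate the total size of the
# regular files and collect readme candidates to render below the listing.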
for f in files:
bc_url_args = dict(url_args)
bc_url_args['path'] = path + f['name']
f['url'] = reverse(browse_view_name,
kwargs=bc_url_args,
query_params=query_params)
sum_file_sizes += f['length']
f['length'] = filesizeformat(f['length'])
if f['name'].lower().startswith('readme'):
readmes[f['name']] = f['checksums']['sha1']
readme_name, readme_url, readme_html = get_readme_to_display(readmes)
browse_view_name = 'browse-' + swh_type + '-log'
history_url = reverse(browse_view_name,
kwargs=url_args,
query_params=query_params)
sum_file_sizes = filesizeformat(sum_file_sizes)
browse_dir_link = \
gen_directory_link(sha1_git, link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
browse_rev_link = \
gen_revision_link(revision_id,
snapshot_context=snapshot_context,
link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
dir_metadata = {'id': sha1_git,
'context-independent directory': browse_dir_link,
'number of regular files': len(files),
'number of subdirectories': len(dirs),
'sum of regular file sizes': sum_file_sizes,
'path': '/' + path,
'revision id': revision_id,
'revision': browse_rev_link,
'snapshot id': snapshot_id}
if origin_info:
dir_metadata['origin id'] = origin_info['id']
dir_metadata['origin type'] = origin_info['type']
dir_metadata['origin url'] = origin_info['url']
dir_metadata['origin visit date'] = format_utc_iso_date(visit_info['date']) # noqa
dir_metadata['origin visit id'] = visit_info['visit']
snapshot_context_url = reverse('browse-snapshot-directory',
kwargs={'snapshot_id': snapshot_id},
query_params=request.GET)
browse_snapshot_link = \
gen_link(snapshot_context_url, link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
dir_metadata['snapshot context'] = browse_snapshot_link
vault_cooking = {
'directory_context': True,
'directory_id': sha1_git,
'revision_context': True,
'revision_id': revision_id
}
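# Compute the SWH persistent identifiers for the directory and the snapshot.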
swh_dir_id = get_swh_persistent_id('directory', sha1_git)
swh_snp_id = get_swh_persistent_id('snapshot', snapshot_id)
swh_ids = [
{
'object_type': 'directory',
'title': 'Directory ' + sha1_git,
'swh_id': swh_dir_id,
'swh_id_url': reverse('browse-swh-id',
kwargs={'swh_id': swh_dir_id}),
'show_options': snapshot_context['origin_info'] is not None
},
{
'object_type': 'snapshot',
'title': 'Snapshot ' + snapshot_id,
'swh_id': swh_snp_id,
'swh_id_url': reverse('browse-swh-id',
kwargs={'swh_id': swh_snp_id}),
'show_options': snapshot_context['origin_info'] is not None
}
]
return render(request, 'directory.html',
{'heading': 'Directory',
'swh_object_name': 'Directory',
'swh_object_metadata': dir_metadata,
'dirs': dirs,
'files': files,
'breadcrumbs': breadcrumbs,
'top_right_link': history_url,
'top_right_link_text': mark_safe(
''
'History'
),
'readme_name': readme_name,
'readme_url': readme_url,
'readme_html': readme_html,
'snapshot_context': snapshot_context,
'vault_cooking': vault_cooking,
'show_actions_menu': True,
'swh_ids': swh_ids})
def browse_snapshot_content(request, snapshot_id=None, origin_type=None,
origin_url=None, timestamp=None, path=None):
"""
Django view implementation for browsing a content in a snapshot context.
"""
try:
snapshot_context = _process_snapshot_request(request, snapshot_id,
origin_type, origin_url,
timestamp, path,
browse_context='content')
root_sha1_git = snapshot_context['root_sha1_git']
content_info = service.lookup_directory_with_path(root_sha1_git, path)
sha1_git = content_info['target']
query_string = 'sha1_git:' + sha1_git
content_data = request_content(query_string)
except Exception as exc:
return handle_view_exception(request, exc)
swh_type = snapshot_context['swh_type']
url_args = snapshot_context['url_args']
query_params = snapshot_context['query_params']
revision_id = snapshot_context['revision_id']
origin_info = snapshot_context['origin_info']
visit_info = snapshot_context['visit_info']
snapshot_id = snapshot_context['snapshot_id']
content = None
language = None
if content_data['raw_data'] is not None:
content_display_data = prepare_content_for_display(
content_data['raw_data'], content_data['mimetype'], path)
content = content_display_data['content_data']
language = content_display_data['language']
filename = None
path_info = None
browse_view_name = 'browse-' + swh_type + '-directory'
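# Build the breadcrumb navigation from the file path inside the snapshot.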
breadcrumbs = []
split_path = path.split('/')
filename = split_path[-1]
path_info = gen_path_info(path[:-len(filename)])
breadcrumbs.append({'name': root_sha1_git[:7],
'url': reverse(browse_view_name,
kwargs=url_args,
query_params=query_params)})
for pi in path_info:
bc_url_args = dict(url_args)
bc_url_args['path'] = pi['path']
breadcrumbs.append({'name': pi['name'],
'url': reverse(browse_view_name,
kwargs=bc_url_args,
query_params=query_params)})
breadcrumbs.append({'name': filename,
'url': None})
browse_content_link = \
gen_content_link(sha1_git, link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
content_raw_url = reverse('browse-content-raw',
kwargs={'query_string': query_string},
query_params={'filename': filename})
browse_rev_link = \
gen_revision_link(revision_id,
snapshot_context=snapshot_context,
link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
content_metadata = {
'context-independent content': browse_content_link,
'sha1 checksum': content_data['checksums']['sha1'],
'sha1_git checksum': content_data['checksums']['sha1_git'],
'sha256 checksum': content_data['checksums']['sha256'],
'blake2s256 checksum': content_data['checksums']['blake2s256'],
'mime type': content_data['mimetype'],
'encoding': content_data['encoding'],
'size': filesizeformat(content_data['length']),
'language': content_data['language'],
'licenses': content_data['licenses'],
'path': '/' + path[:-len(filename)],
'filename': filename,
'revision id': revision_id,
'revision': browse_rev_link,
'snapshot id': snapshot_id
}
if origin_info:
content_metadata['origin id'] = origin_info['id']
content_metadata['origin type'] = origin_info['type']
content_metadata['origin url'] = origin_info['url']
content_metadata['origin visit date'] = format_utc_iso_date(visit_info['date']) # noqa
content_metadata['origin visit id'] = visit_info['visit']
browse_snapshot_url = reverse('browse-snapshot-content',
kwargs={'snapshot_id': snapshot_id,
'path': path},
query_params=request.GET)
browse_snapshot_link = \
gen_link(browse_snapshot_url, link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
content_metadata['snapshot context'] = browse_snapshot_link
cnt_sha1_git = content_data['checksums']['sha1_git']
swh_cnt_id = get_swh_persistent_id('content', cnt_sha1_git)
swh_snp_id = get_swh_persistent_id('snapshot', snapshot_id)
swh_ids = [
{
'object_type': 'content',
'title': 'Content ' + cnt_sha1_git,
'swh_id': swh_cnt_id,
'swh_id_url': reverse('browse-swh-id',
kwargs={'swh_id': swh_cnt_id}),
'show_options': True
},
{
'object_type': 'snapshot',
'title': 'Snapshot ' + snapshot_id,
'swh_id': swh_snp_id,
'swh_id_url': reverse('browse-swh-id',
kwargs={'swh_id': swh_snp_id}),
'show_options': snapshot_context['origin_info'] is not None
}
]
return render(request, 'content.html',
{'heading': 'Content',
'swh_object_name': 'Content',
'swh_object_metadata': content_metadata,
'content': content,
'content_size': content_data['length'],
'max_content_size': content_display_max_size,
'mimetype': content_data['mimetype'],
'language': language,
'breadcrumbs': breadcrumbs,
'top_right_link': content_raw_url,
'top_right_link_text': mark_safe(
''
'Raw File'),
'snapshot_context': snapshot_context,
'vault_cooking': None,
'show_actions_menu': True,
'swh_ids': swh_ids})
PER_PAGE = 20
def browse_snapshot_log(request, snapshot_id=None, origin_type=None,
origin_url=None, timestamp=None):
"""
Django view implementation for browsing a revision history in a
snapshot context.
"""
try:
snapshot_context = _process_snapshot_request(request, snapshot_id,
origin_type, origin_url,
timestamp, browse_context='log') # noqa
revision_id = snapshot_context['revision_id']
current_rev = revision_id
per_page = int(request.GET.get('per_page', PER_PAGE))
revs_breadcrumb = request.GET.get('revs_breadcrumb', None)
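# The 'revs_breadcrumb' query parameter keeps track of the revisions walked
# so far, separated by '/': the last one is the revision whose history
# is currently displayed.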
if revs_breadcrumb:
current_rev = revs_breadcrumb.split('/')[-1]
revision_log = service.lookup_revision_log(current_rev,
limit=per_page+1)
revision_log = list(revision_log)
except Exception as exc:
return handle_view_exception(request, exc)
swh_type = snapshot_context['swh_type']
origin_info = snapshot_context['origin_info']
visit_info = snapshot_context['visit_info']
url_args = snapshot_context['url_args']
query_params = snapshot_context['query_params']
snapshot_id = snapshot_context['snapshot_id']
query_params['per_page'] = per_page
revision_log_display_data = prepare_revision_log_for_display(
revision_log, per_page, revs_breadcrumb, snapshot_context)
browse_view_name = 'browse-' + swh_type + '-log'
prev_rev = revision_log_display_data['prev_rev']
prev_revs_breadcrumb = revision_log_display_data['prev_revs_breadcrumb']
prev_log_url = None
query_params['revs_breadcrumb'] = prev_revs_breadcrumb
if prev_rev:
prev_log_url = \
reverse(browse_view_name,
kwargs=url_args,
query_params=query_params)
next_rev = revision_log_display_data['next_rev']
next_revs_breadcrumb = revision_log_display_data['next_revs_breadcrumb']
next_log_url = None
query_params['revs_breadcrumb'] = next_revs_breadcrumb
if next_rev:
next_log_url = \
reverse(browse_view_name,
kwargs=url_args,
query_params=query_params)
revision_log_data = revision_log_display_data['revision_log_data']
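# Add, for each displayed revision, a link to browse its files within the
# current snapshot context.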
for i, log in enumerate(revision_log_data):
params = {
'revision': revision_log[i]['id'],
}
if 'visit_id' in query_params:
params['visit_id'] = query_params['visit_id']
log['directory'] = gen_snapshot_directory_link(
snapshot_context, revision_log[i]['id'],
link_text=''
'Browse files',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
browse_log_link = \
gen_revision_log_link(revision_id, link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
revision_metadata = {
'context-independent revision history': browse_log_link,
'snapshot id': snapshot_id
}
if origin_info:
revision_metadata['origin id'] = origin_info['id']
revision_metadata['origin type'] = origin_info['type']
revision_metadata['origin url'] = origin_info['url']
revision_metadata['origin visit date'] = format_utc_iso_date(visit_info['date']) # noqa
revision_metadata['origin visit id'] = visit_info['visit']
browse_snapshot_url = reverse('browse-snapshot-log',
kwargs={'snapshot_id': snapshot_id},
query_params=request.GET)
browse_snapshot_link = \
gen_link(browse_snapshot_url, link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
revision_metadata['snapshot context'] = browse_snapshot_link
swh_snp_id = get_swh_persistent_id('snapshot', snapshot_id)
swh_ids = [
{
'object_type': 'snapshot',
'title': 'Snapshot ' + snapshot_id,
'swh_id': swh_snp_id,
'swh_id_url': reverse('browse-swh-id',
kwargs={'swh_id': swh_snp_id}),
'show_options': snapshot_context['origin_info'] is not None
}
]
return render(request, 'revision-log.html',
{'heading': 'Revision history',
'swh_object_name': 'Revision history',
'swh_object_metadata': revision_metadata,
'revision_log': revision_log_data,
'next_log_url': next_log_url,
'prev_log_url': prev_log_url,
'breadcrumbs': None,
'top_right_link': None,
'top_right_link_text': None,
'snapshot_context': snapshot_context,
'vault_cooking': None,
'show_actions_menu': True,
'swh_ids': swh_ids})
def browse_snapshot_branches(request, snapshot_id=None, origin_type=None,
origin_url=None, timestamp=None):
"""
Django view implementation for browsing a list of branches in a snapshot
context.
"""
try:
snapshot_context = _process_snapshot_request(request, snapshot_id,
origin_type, origin_url,
timestamp)
except Exception as exc:
return handle_view_exception(request, exc)
branches_offset = int(request.GET.get('branches_offset', 0))
swh_type = snapshot_context['swh_type']
origin_info = snapshot_context['origin_info']
url_args = snapshot_context['url_args']
query_params = snapshot_context['query_params']
browse_view_name = 'browse-' + swh_type + '-directory'
branches = snapshot_context['branches']
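# Paginate the branch list, displaying PER_PAGE entries at a time.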
displayed_branches = \
branches[branches_offset:branches_offset+PER_PAGE]
for branch in displayed_branches:
if snapshot_id:
revision_url = reverse('browse-revision',
kwargs={'sha1_git': branch['revision']},
query_params={'snapshot_id': snapshot_id})
else:
revision_url = reverse('browse-revision',
kwargs={'sha1_git': branch['revision']},
query_params={'origin_type': origin_info['type'], # noqa
- 'origin_url': origin_info['url']}) # noqa
+ 'origin': origin_info['url']}) # noqa
query_params['branch'] = branch['name']
directory_url = reverse(browse_view_name,
kwargs=url_args,
query_params=query_params)
del query_params['branch']
branch['revision_url'] = revision_url
branch['directory_url'] = directory_url
browse_view_name = 'browse-' + swh_type + '-branches'
prev_branches_url = None
next_branches_url = None
next_offset = branches_offset + PER_PAGE
prev_offset = branches_offset - PER_PAGE
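# Expose previous/next pagination links only when there are branches
# before or after the current page.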
if next_offset < len(branches):
query_params['branches_offset'] = next_offset
next_branches_url = reverse(browse_view_name,
kwargs=url_args, query_params=query_params)
query_params['branches_offset'] = None
if prev_offset >= 0:
if prev_offset != 0:
query_params['branches_offset'] = prev_offset
prev_branches_url = reverse(browse_view_name,
kwargs=url_args, query_params=query_params)
return render(request, 'branches.html',
{'heading': 'Origin branches',
'swh_object_name': 'Branches',
'swh_object_metadata': {},
'top_right_link': None,
'top_right_link_text': None,
'displayed_branches': displayed_branches,
'prev_branches_url': prev_branches_url,
'next_branches_url': next_branches_url,
'snapshot_context': snapshot_context})
def browse_snapshot_releases(request, snapshot_id=None, origin_type=None,
origin_url=None, timestamp=None):
"""
Django view implementation for browsing a list of releases in a snapshot
context.
"""
try:
snapshot_context = _process_snapshot_request(request, snapshot_id,
origin_type, origin_url,
timestamp)
except Exception as exc:
return handle_view_exception(request, exc)
releases_offset = int(request.GET.get('releases_offset', 0))
swh_type = snapshot_context['swh_type']
origin_info = snapshot_context['origin_info']
url_args = snapshot_context['url_args']
query_params = snapshot_context['query_params']
releases = snapshot_context['releases']
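# Paginate the release list, displaying PER_PAGE entries at a time.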
displayed_releases = \
releases[releases_offset:releases_offset+PER_PAGE]
for release in displayed_releases:
if snapshot_id:
release_url = reverse('browse-release',
kwargs={'sha1_git': release['id']},
query_params={'snapshot_id': snapshot_id})
else:
release_url = reverse('browse-release',
kwargs={'sha1_git': release['id']},
query_params={'origin_type': origin_info['type'], # noqa
- 'origin_url': origin_info['url']}) # noqa
+ 'origin': origin_info['url']}) # noqa
query_params['release'] = release['name']
del query_params['release']
release['release_url'] = release_url
browse_view_name = 'browse-' + swh_type + '-releases'
prev_releases_url = None
next_releases_url = None
next_offset = releases_offset + PER_PAGE
prev_offset = releases_offset - PER_PAGE
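# Expose previous/next pagination links only when there are releases
# before or after the current page.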
if next_offset < len(releases):
query_params['releases_offset'] = next_offset
next_releases_url = reverse(browse_view_name,
kwargs=url_args, query_params=query_params)
query_params['releases_offset'] = None
if prev_offset >= 0:
if prev_offset != 0:
query_params['releases_offset'] = prev_offset
prev_releases_url = reverse(browse_view_name,
kwargs=url_args, query_params=query_params)
return render(request, 'releases.html',
{'heading': 'Origin releases',
'top_panel_visible': False,
'top_panel_collapsible': False,
'swh_object_name': 'Releases',
'swh_object_metadata': {},
'top_right_link': None,
'top_right_link_text': None,
'displayed_releases': displayed_releases,
'prev_releases_url': prev_releases_url,
'next_releases_url': next_releases_url,
'snapshot_context': snapshot_context,
'vault_cooking': None,
'show_actions_menu': False})
diff --git a/swh/web/templates/browse-help.html b/swh/web/templates/browse-help.html
index e6c49910..cd332ef0 100644
--- a/swh/web/templates/browse-help.html
+++ b/swh/web/templates/browse-help.html
@@ -1,189 +1,189 @@
{% extends "browse-layout.html" %}
{% comment %}
Copyright (C) 2017-2018 The Software Heritage developers
See the AUTHORS file at the top-level directory of this distribution
License: GNU Affero General Public License version 3, or any later version
See top-level LICENSE file for more information
{% endcomment %}
{% block navbar-content %}
How to browse the archive?
{% endblock %}
{% block browse-content %}
Overview
This web application aims to provide HTML views to easily navigate the Software Heritage archive. This is an ongoing development effort,
and new features and improvements will be progressively added over time.
URI scheme
The current URI scheme of this web application is described below; it depends on the type of Software Heritage object to
browse. Its exhaustive documentation can be consulted in the official
Software Heritage development documentation
Context-independent browsing
Context-independent URLs provide information about objects (e.g., revisions, directories, contents, persons, …), independently
of the contexts where they have been found (e.g., specific software origins, branches, commits, …).
Below are some examples of endpoints used simply to render the corresponding information for user consumption:
When hyperlinks are created while browsing these kinds of endpoints, they always point to other context-independent browsing
URLs.
Context-dependent browsing
Context-dependent URLs provide information about objects, limited to specific contexts where the objects have been found.
Currently, browsing Software Heritage objects in the context of an
origin is available. Below are some examples of such endpoints:
Search software origins to browse
To facilitate browsing the archive and generate relevant entry points to it, a
search interface is available. Currently, it enables searching software origins by the URLs they were retrieved
from. More search criteria will be added in the future.
{% endblock %}
\ No newline at end of file
diff --git a/swh/web/templates/includes/snapshot-context.html b/swh/web/templates/includes/snapshot-context.html
index 5efe715a..46104be8 100644
--- a/swh/web/templates/includes/snapshot-context.html
+++ b/swh/web/templates/includes/snapshot-context.html
@@ -1,46 +1,46 @@
{% comment %}
Copyright (C) 2017-2018 The Software Heritage developers
See the AUTHORS file at the top-level directory of this distribution
License: GNU Affero General Public License version 3, or any later version
See top-level LICENSE file for more information
{% endcomment %}
diff --git a/swh/web/tests/browse/views/test_identifiers.py b/swh/web/tests/browse/views/test_identifiers.py
index c39df00e..fa0572bc 100644
--- a/swh/web/tests/browse/views/test_identifiers.py
+++ b/swh/web/tests/browse/views/test_identifiers.py
@@ -1,144 +1,144 @@
# Copyright (C) 2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
# flake8: noqa
from unittest.mock import patch
from nose.tools import istest
from django.test import TestCase
from swh.web.common.exc import BadInputExc
from swh.web.common.utils import reverse
from swh.web.tests.testbase import SWHWebTestBase
from .data.content_test_data import stub_content_text_data
from .data.directory_test_data import stub_root_directory_sha1
from .data.revision_test_data import revision_id_test
from .data.release_test_data import stub_release
swh_id_prefix = 'swh:1:'
class SwhBrowseIdTest(SWHWebTestBase, TestCase):
@istest
def content_id_browse(self):
cnt_sha1_git = stub_content_text_data['checksums']['sha1_git']
swh_id = swh_id_prefix + 'cnt:' + cnt_sha1_git
url = reverse('browse-swh-id',
kwargs={'swh_id': swh_id})
query_string = 'sha1_git:' + cnt_sha1_git
content_browse_url = reverse('browse-content',
kwargs={'query_string': query_string})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 302)
self.assertEqual(resp['location'], content_browse_url)
@istest
def directory_id_browse(self):
swh_id = swh_id_prefix + 'dir:' + stub_root_directory_sha1
url = reverse('browse-swh-id',
kwargs={'swh_id': swh_id})
directory_browse_url = reverse('browse-directory',
kwargs={'sha1_git': stub_root_directory_sha1})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 302)
self.assertEqual(resp['location'], directory_browse_url)
@istest
def revision_id_browse(self):
swh_id = swh_id_prefix + 'rev:' + revision_id_test
url = reverse('browse-swh-id',
kwargs={'swh_id': swh_id})
revision_browse_url = reverse('browse-revision',
kwargs={'sha1_git': revision_id_test})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 302)
self.assertEqual(resp['location'], revision_browse_url)
query_params = {'origin_type': 'git',
- 'origin_url': 'https://github.com/webpack/webpack'}
+ 'origin': 'https://github.com/webpack/webpack'}
url = reverse('browse-swh-id',
kwargs={'swh_id': swh_id},
query_params=query_params)
revision_browse_url = reverse('browse-revision',
kwargs={'sha1_git': revision_id_test},
query_params=query_params)
resp = self.client.get(url)
self.assertEquals(resp.status_code, 302)
self.assertEqual(resp['location'], revision_browse_url)
@istest
def release_id_browse(self):
swh_id = swh_id_prefix + 'rel:' + stub_release['id']
url = reverse('browse-swh-id',
kwargs={'swh_id': swh_id})
release_browse_url = reverse('browse-release',
kwargs={'sha1_git': stub_release['id']})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 302)
self.assertEqual(resp['location'], release_browse_url)
query_params = {'origin_type': 'git',
- 'origin_url': 'https://github.com/python/cpython'}
+ 'origin': 'https://github.com/python/cpython'}
url = reverse('browse-swh-id',
kwargs={'swh_id': swh_id},
query_params=query_params)
release_browse_url = reverse('browse-release',
kwargs={'sha1_git': stub_release['id']},
query_params=query_params)
resp = self.client.get(url)
self.assertEquals(resp.status_code, 302)
self.assertEqual(resp['location'], release_browse_url)
@istest
def bad_id_browse(self):
swh_id = swh_id_prefix + 'foo:' + stub_release['id']
url = reverse('browse-swh-id',
kwargs={'swh_id': swh_id})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 400)
@istest
def content_id_optional_parts_browse(self):
cnt_sha1_git = stub_content_text_data['checksums']['sha1_git']
optional_parts = ';lines=4-20;origin=https://github.com/user/repo'
swh_id = swh_id_prefix + 'cnt:' + cnt_sha1_git + optional_parts
url = reverse('browse-swh-id',
kwargs={'swh_id': swh_id})
query_string = 'sha1_git:' + cnt_sha1_git
content_browse_url = reverse('browse-content',
kwargs={'query_string': query_string},
query_params={'origin' : 'https://github.com/user/repo'})
content_browse_url += '#L4-L20'
resp = self.client.get(url)
self.assertEquals(resp.status_code, 302)
self.assertEqual(resp['location'], content_browse_url)
diff --git a/swh/web/tests/browse/views/test_origin.py b/swh/web/tests/browse/views/test_origin.py
index a1607111..413c12f8 100644
--- a/swh/web/tests/browse/views/test_origin.py
+++ b/swh/web/tests/browse/views/test_origin.py
@@ -1,833 +1,833 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
# flake8: noqa
from unittest.mock import patch
from nose.tools import istest, nottest
from django.test import TestCase
from django.utils.html import escape
from swh.web.common.exc import NotFoundExc
from swh.web.common.utils import (
reverse, gen_path_info, format_utc_iso_date,
parse_timestamp, get_swh_persistent_id
)
from swh.web.tests.testbase import SWHWebTestBase
from .data.origin_test_data import (
origin_info_test_data,
origin_visits_test_data,
stub_content_origin_info, stub_content_origin_visit_id,
stub_content_origin_visit_unix_ts, stub_content_origin_visit_iso_date,
stub_content_origin_branch,
stub_content_origin_visits, stub_content_origin_snapshot,
stub_origin_info, stub_visit_id,
stub_origin_visits, stub_origin_snapshot,
stub_origin_root_directory_entries, stub_origin_master_branch,
stub_origin_root_directory_sha1, stub_origin_sub_directory_path,
stub_origin_sub_directory_entries, stub_visit_unix_ts, stub_visit_iso_date
)
from .data.content_test_data import (
stub_content_root_dir,
stub_content_text_data,
stub_content_text_path
)
stub_origin_info_no_type = dict(stub_origin_info)
stub_origin_info_no_type['type'] = None
class SwhBrowseOriginTest(SWHWebTestBase, TestCase):
@patch('swh.web.browse.views.origin.get_origin_info')
@patch('swh.web.browse.views.origin.get_origin_visits')
@patch('swh.web.browse.views.origin.service')
@istest
def origin_visits_browse(self, mock_service, mock_get_origin_visits,
mock_get_origin_info):
mock_service.lookup_origin.return_value = origin_info_test_data
mock_get_origin_info.return_value = origin_info_test_data
mock_get_origin_visits.return_value = origin_visits_test_data
url = reverse('browse-origin-visits',
kwargs={'origin_type': origin_info_test_data['type'],
'origin_url': origin_info_test_data['url']})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('origin-visits.html')
url = reverse('browse-origin-visits',
kwargs={'origin_url': origin_info_test_data['url']})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('origin-visits.html')
@nottest
def origin_content_view_test(self, origin_info, origin_visits,
origin_branches, origin_releases,
origin_branch,
root_dir_sha1, content_sha1, content_sha1_git,
content_path, content_data,
content_language,
visit_id=None, timestamp=None):
url_args = {'origin_type': origin_info['type'],
'origin_url': origin_info['url'],
'path': content_path}
if not visit_id:
visit_id = origin_visits[-1]['visit']
query_params = {}
if timestamp:
url_args['timestamp'] = timestamp
if visit_id:
query_params['visit_id'] = visit_id
url = reverse('browse-origin-content',
kwargs=url_args,
query_params=query_params)
resp = self.client.get(url)
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('content.html')
self.assertContains(resp, '<code class="%s">' % content_language)
self.assertContains(resp, escape(content_data))
split_path = content_path.split('/')
filename = split_path[-1]
path = content_path.replace(filename, '')[:-1]
path_info = gen_path_info(path)
del url_args['path']
if timestamp:
url_args['timestamp'] = \
format_utc_iso_date(parse_timestamp(timestamp).isoformat(),
'%Y-%m-%dT%H:%M:%S')
root_dir_url = reverse('browse-origin-directory',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '',
count=len(path_info)+1)
self.assertContains(resp, '<a href="%s">%s</a>' %
(root_dir_url, root_dir_sha1[:7]))
for p in path_info:
url_args['path'] = p['path']
dir_url = reverse('browse-origin-directory',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '<a href="%s">%s</a>' %
(dir_url, p['name']))
self.assertContains(resp, '%s' % filename)
query_string = 'sha1_git:' + content_sha1
url_raw = reverse('browse-content-raw',
kwargs={'query_string': query_string},
query_params={'filename': filename})
self.assertContains(resp, url_raw)
del url_args['path']
origin_branches_url = \
reverse('browse-origin-branches',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
(origin_branches_url, len(origin_branches)))
origin_releases_url = \
reverse('browse-origin-releases',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
(origin_releases_url, len(origin_releases)))
self.assertContains(resp, '',
count=len(origin_branches))
url_args['path'] = content_path
for branch in origin_branches:
query_params['branch'] = branch['name']
root_dir_branch_url = \
reverse('browse-origin-content',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '<a href="%s">' % root_dir_branch_url)
self.assertContains(resp, '',
count=len(origin_releases))
query_params['branch'] = None
for release in origin_releases:
query_params['release'] = release['name']
root_dir_release_url = \
reverse('browse-origin-content',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '<a href="%s">' % root_dir_release_url)
del url_args['origin_type']
url = reverse('browse-origin-content',
kwargs=url_args,
query_params=query_params)
resp = self.client.get(url)
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('content.html')
swh_cnt_id = get_swh_persistent_id('content', content_sha1_git)
swh_cnt_id_url = reverse('browse-swh-id',
kwargs={'swh_id': swh_cnt_id})
self.assertContains(resp, swh_cnt_id)
self.assertContains(resp, swh_cnt_id_url)
@patch('swh.web.browse.utils.get_origin_visits')
@patch('swh.web.browse.utils.get_origin_visit_snapshot')
@patch('swh.web.browse.views.utils.snapshot_context.service')
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.utils.snapshot_context.request_content')
@istest
def origin_content_view(self, mock_request_content, mock_utils_service,
mock_service, mock_get_origin_visit_snapshot,
mock_get_origin_visits):
stub_content_text_sha1 = stub_content_text_data['checksums']['sha1']
stub_content_text_sha1_git = stub_content_text_data['checksums']['sha1_git']
mock_get_origin_visits.return_value = stub_content_origin_visits
mock_get_origin_visit_snapshot.return_value = stub_content_origin_snapshot
mock_service.lookup_directory_with_path.return_value = \
{'target': stub_content_text_sha1}
mock_request_content.return_value = stub_content_text_data
mock_utils_service.lookup_origin.return_value = stub_content_origin_info
self.origin_content_view_test(stub_content_origin_info,
stub_content_origin_visits,
stub_content_origin_snapshot[0],
stub_content_origin_snapshot[1],
stub_content_origin_branch,
stub_content_root_dir,
stub_content_text_sha1,
stub_content_text_sha1_git,
stub_content_text_path,
stub_content_text_data['raw_data'],
'cpp')
self.origin_content_view_test(stub_content_origin_info,
stub_content_origin_visits,
stub_content_origin_snapshot[0],
stub_content_origin_snapshot[1],
stub_content_origin_branch,
stub_content_root_dir,
stub_content_text_sha1,
stub_content_text_sha1_git,
stub_content_text_path,
stub_content_text_data['raw_data'],
'cpp',
visit_id=stub_content_origin_visit_id)
self.origin_content_view_test(stub_content_origin_info,
stub_content_origin_visits,
stub_content_origin_snapshot[0],
stub_content_origin_snapshot[1],
stub_content_origin_branch,
stub_content_root_dir,
stub_content_text_sha1,
stub_content_text_sha1_git,
stub_content_text_path,
stub_content_text_data['raw_data'],
'cpp',
timestamp=stub_content_origin_visit_unix_ts)
self.origin_content_view_test(stub_content_origin_info,
stub_content_origin_visits,
stub_content_origin_snapshot[0],
stub_content_origin_snapshot[1],
stub_content_origin_branch,
stub_content_root_dir,
stub_content_text_sha1,
stub_content_text_sha1_git,
stub_content_text_path,
stub_content_text_data['raw_data'],
'cpp',
timestamp=stub_content_origin_visit_iso_date)
@nottest
def origin_directory_view(self, origin_info, origin_visits,
origin_branches, origin_releases, origin_branch,
root_directory_sha1, directory_entries,
visit_id=None, timestamp=None, path=None):
dirs = [e for e in directory_entries
if e['type'] == 'dir']
files = [e for e in directory_entries
if e['type'] == 'file']
if not visit_id:
visit_id = origin_visits[-1]['visit']
url_args = {'origin_type': origin_info['type'],
'origin_url': origin_info['url']}
query_params = {}
if timestamp:
url_args['timestamp'] = timestamp
else:
query_params['visit_id'] = visit_id
if path:
url_args['path'] = path
url = reverse('browse-origin-directory',
kwargs=url_args,
query_params=query_params)
resp = self.client.get(url)
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('directory.html')
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('directory.html')
self.assertContains(resp, '',
count=len(dirs))
self.assertContains(resp, ' | ',
count=len(files))
if timestamp:
url_args['timestamp'] = \
format_utc_iso_date(parse_timestamp(timestamp).isoformat(),
'%Y-%m-%dT%H:%M:%S')
for d in dirs:
dir_path = d['name']
if path:
dir_path = "%s/%s" % (path, d['name'])
dir_url_args = dict(url_args)
dir_url_args['path'] = dir_path
dir_url = reverse('browse-origin-directory',
kwargs=dir_url_args,
query_params=query_params)
self.assertContains(resp, dir_url)
for f in files:
file_path = f['name']
if path:
file_path = "%s/%s" % (path, f['name'])
file_url_args = dict(url_args)
file_url_args['path'] = file_path
file_url = reverse('browse-origin-content',
kwargs=file_url_args,
query_params=query_params)
self.assertContains(resp, file_url)
if 'path' in url_args:
del url_args['path']
root_dir_branch_url = \
reverse('browse-origin-directory',
kwargs=url_args,
query_params=query_params)
nb_bc_paths = 1
if path:
nb_bc_paths = len(path.split('/')) + 1
self.assertContains(resp, '', count=nb_bc_paths)
self.assertContains(resp, '<a href="%s">%s</a>' %
(root_dir_branch_url,
root_directory_sha1[:7]))
origin_branches_url = \
reverse('browse-origin-branches',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
(origin_branches_url, len(origin_branches)))
origin_releases_url = \
reverse('browse-origin-releases',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
(origin_releases_url, len(origin_releases)))
if path:
url_args['path'] = path
self.assertContains(resp, '',
count=len(origin_branches))
for branch in origin_branches:
query_params['branch'] = branch['name']
root_dir_branch_url = \
reverse('browse-origin-directory',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '<a href="%s">' % root_dir_branch_url)
self.assertContains(resp, '',
count=len(origin_releases))
query_params['branch'] = None
for release in origin_releases:
query_params['release'] = release['name']
root_dir_release_url = \
reverse('browse-origin-directory',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '<a href="%s">' % root_dir_release_url)
self.assertContains(resp, 'vault-cook-directory')
self.assertContains(resp, 'vault-cook-revision')
swh_dir_id = get_swh_persistent_id('directory', directory_entries[0]['dir_id']) # noqa
swh_dir_id_url = reverse('browse-swh-id',
kwargs={'swh_id': swh_dir_id})
self.assertContains(resp, swh_dir_id)
self.assertContains(resp, swh_dir_id_url)
@patch('swh.web.browse.utils.get_origin_visits')
@patch('swh.web.browse.utils.get_origin_visit_snapshot')
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.origin.service')
@istest
def test_origin_root_directory_view(self, mock_origin_service,
mock_utils_service,
mock_get_origin_visit_snapshot,
mock_get_origin_visits):
mock_get_origin_visits.return_value = stub_origin_visits
mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
mock_utils_service.lookup_directory.return_value = \
stub_origin_root_directory_entries
mock_utils_service.lookup_origin.return_value = stub_origin_info
self.origin_directory_view(stub_origin_info, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_root_directory_entries)
self.origin_directory_view(stub_origin_info, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_root_directory_entries,
visit_id=stub_visit_id)
self.origin_directory_view(stub_origin_info, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_root_directory_entries,
timestamp=stub_visit_unix_ts)
self.origin_directory_view(stub_origin_info, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_root_directory_entries,
timestamp=stub_visit_iso_date)
self.origin_directory_view(stub_origin_info_no_type, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_root_directory_entries)
self.origin_directory_view(stub_origin_info_no_type, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_root_directory_entries,
visit_id=stub_visit_id)
self.origin_directory_view(stub_origin_info_no_type, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_root_directory_entries,
timestamp=stub_visit_unix_ts)
self.origin_directory_view(stub_origin_info_no_type, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_root_directory_entries,
timestamp=stub_visit_iso_date)
@patch('swh.web.browse.utils.get_origin_visits')
@patch('swh.web.browse.utils.get_origin_visit_snapshot')
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.utils.snapshot_context.service')
@istest
def origin_sub_directory_view(self, mock_origin_service,
mock_utils_service,
mock_get_origin_visit_snapshot,
mock_get_origin_visits):
mock_get_origin_visits.return_value = stub_origin_visits
mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
mock_utils_service.lookup_directory.return_value = \
stub_origin_sub_directory_entries
mock_origin_service.lookup_directory_with_path.return_value = \
{'target': stub_origin_sub_directory_entries[0]['dir_id'],
'type' : 'dir'}
mock_utils_service.lookup_origin.return_value = stub_origin_info
self.origin_directory_view(stub_origin_info, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_sub_directory_entries,
path=stub_origin_sub_directory_path)
self.origin_directory_view(stub_origin_info, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_sub_directory_entries,
visit_id=stub_visit_id,
path=stub_origin_sub_directory_path)
self.origin_directory_view(stub_origin_info, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_sub_directory_entries,
timestamp=stub_visit_unix_ts,
path=stub_origin_sub_directory_path)
self.origin_directory_view(stub_origin_info, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_sub_directory_entries,
timestamp=stub_visit_iso_date,
path=stub_origin_sub_directory_path)
self.origin_directory_view(stub_origin_info_no_type, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_sub_directory_entries,
path=stub_origin_sub_directory_path)
self.origin_directory_view(stub_origin_info_no_type, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_sub_directory_entries,
visit_id=stub_visit_id,
path=stub_origin_sub_directory_path)
self.origin_directory_view(stub_origin_info_no_type, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_sub_directory_entries,
timestamp=stub_visit_unix_ts,
path=stub_origin_sub_directory_path)
self.origin_directory_view(stub_origin_info_no_type, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_sub_directory_entries,
timestamp=stub_visit_iso_date,
path=stub_origin_sub_directory_path)
@patch('swh.web.browse.views.utils.snapshot_context.request_content')
@patch('swh.web.browse.utils.get_origin_visits')
@patch('swh.web.browse.utils.get_origin_visit_snapshot')
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.origin.service')
@patch('swh.web.browse.views.utils.snapshot_context.service')
@patch('swh.web.browse.views.origin.get_origin_info')
@istest
def test_origin_request_errors(self, mock_get_origin_info,
mock_snapshot_service,
mock_origin_service,
mock_utils_service,
mock_get_origin_visit_snapshot,
mock_get_origin_visits,
mock_request_content):
mock_get_origin_info.side_effect = \
NotFoundExc('origin not found')
url = reverse('browse-origin-visits',
kwargs={'origin_type': 'foo',
'origin_url': 'bar'})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, 'origin not found', status_code=404)
mock_utils_service.lookup_origin.side_effect = None
mock_utils_service.lookup_origin.return_value = origin_info_test_data
mock_get_origin_visits.return_value = []
url = reverse('browse-origin-directory',
kwargs={'origin_type': 'foo',
'origin_url': 'bar'})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, "No SWH visit", status_code=404)
mock_get_origin_visits.return_value = stub_origin_visits
mock_get_origin_visit_snapshot.side_effect = \
NotFoundExc('visit not found')
url = reverse('browse-origin-directory',
kwargs={'origin_type': 'foo',
'origin_url': 'bar'},
query_params={'visit_id': len(stub_origin_visits)+1})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertRegex(resp.content.decode('utf-8'), 'Visit.*not found')
mock_get_origin_visits.return_value = stub_origin_visits
mock_get_origin_visit_snapshot.side_effect = None
mock_get_origin_visit_snapshot.return_value = ([], [])
url = reverse('browse-origin-directory',
kwargs={'origin_type': 'foo',
'origin_url': 'bar'})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertRegex(resp.content.decode('utf-8'),
'Origin.*has an empty list of branches')
mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
mock_utils_service.lookup_directory.side_effect = \
NotFoundExc('Directory not found')
url = reverse('browse-origin-directory',
kwargs={'origin_type': 'foo',
'origin_url': 'bar'})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, 'Directory not found', status_code=404)
mock_origin_service.lookup_origin.side_effect = None
mock_origin_service.lookup_origin.return_value = origin_info_test_data
mock_get_origin_visits.return_value = []
url = reverse('browse-origin-content',
kwargs={'origin_type': 'foo',
'origin_url': 'bar',
'path': 'foo'})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, "No SWH visit", status_code=404)
mock_get_origin_visits.return_value = stub_origin_visits
mock_get_origin_visit_snapshot.side_effect = \
NotFoundExc('visit not found')
url = reverse('browse-origin-content',
kwargs={'origin_type': 'foo',
'origin_url': 'bar',
'path': 'foo'},
query_params={'visit_id': len(stub_origin_visits)+1})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertRegex(resp.content.decode('utf-8'), 'Visit.*not found')
mock_get_origin_visits.return_value = stub_origin_visits
mock_get_origin_visit_snapshot.side_effect = None
mock_get_origin_visit_snapshot.return_value = ([], [])
url = reverse('browse-origin-content',
kwargs={'origin_type': 'foo',
'origin_url': 'bar',
'path': 'baz'})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertRegex(resp.content.decode('utf-8'),
'Origin.*has an empty list of branches')
mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
mock_snapshot_service.lookup_directory_with_path.return_value = \
{'target': stub_content_text_data['checksums']['sha1']}
mock_request_content.side_effect = \
NotFoundExc('Content not found')
url = reverse('browse-origin-content',
kwargs={'origin_type': 'foo',
'origin_url': 'bar',
'path': 'baz'})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, 'Content not found', status_code=404)
@nottest
def origin_branches_test(self, origin_info, origin_snapshot):
url_args = {'origin_type': origin_info['type'],
'origin_url': origin_info['url']}
url = reverse('browse-origin-branches',
kwargs=url_args)
resp = self.client.get(url)
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('branches.html')
origin_branches = origin_snapshot[0]
origin_releases = origin_snapshot[1]
origin_branches_url = \
reverse('browse-origin-branches',
kwargs=url_args)
self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
(origin_branches_url, len(origin_branches)))
origin_releases_url = \
reverse('browse-origin-releases',
kwargs=url_args)
self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
(origin_releases_url, len(origin_releases)))
self.assertContains(resp, '',
count=len(origin_branches))
for branch in origin_branches:
browse_branch_url = reverse('browse-origin-directory',
kwargs={'origin_type': origin_info['type'],
'origin_url': origin_info['url']},
query_params={'branch': branch['name']})
self.assertContains(resp, '<a href="%s">%s</a>' % (escape(browse_branch_url), branch['name']))
browse_revision_url = reverse('browse-revision',
kwargs={'sha1_git': branch['revision']},
query_params={'origin_type': origin_info['type'],
- 'origin_url': origin_info['url']})
+ 'origin': origin_info['url']})
self.assertContains(resp, '<a href="%s">%s</a>' % (escape(browse_revision_url), branch['revision'][:7]))
@patch('swh.web.browse.utils.get_origin_visits')
@patch('swh.web.browse.utils.get_origin_visit_snapshot')
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.origin.service')
@istest
def origin_branches(self, mock_origin_service,
mock_utils_service,
mock_get_origin_visit_snapshot,
mock_get_origin_visits):
mock_get_origin_visits.return_value = stub_origin_visits
mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
mock_utils_service.lookup_origin.return_value = stub_origin_info
self.origin_branches_test(stub_origin_info, stub_origin_snapshot)
self.origin_branches_test(stub_origin_info_no_type, stub_origin_snapshot)
@nottest
def origin_releases_test(self, origin_info, origin_snapshot):
url_args = {'origin_type': origin_info['type'],
'origin_url': origin_info['url']}
url = reverse('browse-origin-releases',
kwargs=url_args)
resp = self.client.get(url)
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('releases.html')
origin_branches = origin_snapshot[0]
origin_releases = origin_snapshot[1]
origin_branches_url = \
reverse('browse-origin-branches',
kwargs=url_args)
self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
(origin_branches_url, len(origin_branches)))
origin_releases_url = \
reverse('browse-origin-releases',
kwargs=url_args)
self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
(origin_releases_url, len(origin_releases)))
self.assertContains(resp, ' ',
count=len(origin_releases))
for release in origin_releases:
browse_release_url = reverse('browse-release',
kwargs={'sha1_git': release['id']},
query_params={'origin_type': origin_info['type'],
- 'origin_url': origin_info['url']})
+ 'origin': origin_info['url']})
self.assertContains(resp, '<a href="%s">%s</a>' % (escape(browse_release_url), release['name']))
@patch('swh.web.browse.utils.get_origin_visits')
@patch('swh.web.browse.utils.get_origin_visit_snapshot')
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.origin.service')
@istest
def origin_releases(self, mock_origin_service,
mock_utils_service,
mock_get_origin_visit_snapshot,
mock_get_origin_visits):
mock_get_origin_visits.return_value = stub_origin_visits
mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
mock_utils_service.lookup_origin.return_value = stub_origin_info
self.origin_releases_test(stub_origin_info, stub_origin_snapshot)
self.origin_releases_test(stub_origin_info_no_type, stub_origin_snapshot)
diff --git a/swh/web/tests/browse/views/test_release.py b/swh/web/tests/browse/views/test_release.py
index ddb7044b..738b743c 100644
--- a/swh/web/tests/browse/views/test_release.py
+++ b/swh/web/tests/browse/views/test_release.py
@@ -1,116 +1,114 @@
# Copyright (C) 2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
# flake8: noqa
from unittest.mock import patch
from nose.tools import istest
from django.test import TestCase
from swh.web.common.exc import NotFoundExc
from swh.web.common.utils import (
reverse, format_utc_iso_date, get_swh_persistent_id
)
from swh.web.tests.testbase import SWHWebTestBase
from .data.release_test_data import (
stub_release
)
from .data.origin_test_data import stub_origin_visits
class SwhBrowseReleaseTest(SWHWebTestBase, TestCase):
@patch('swh.web.browse.views.release.service')
@patch('swh.web.browse.utils.service')
@patch('swh.web.common.utils.service')
@istest
def release_browse(self, mock_service_common, mock_service_utils,
mock_service):
mock_service.lookup_release.return_value = stub_release
url = reverse('browse-release',
kwargs={'sha1_git': stub_release['id']})
release_id = stub_release['id']
release_name = stub_release['name']
author_id = stub_release['author']['id']
author_name = stub_release['author']['name']
author_url = reverse('browse-person',
kwargs={'person_id': author_id})
release_date = stub_release['date']
message = stub_release['message']
target_type = stub_release['target_type']
target = stub_release['target']
target_url = reverse('browse-revision', kwargs={'sha1_git': target})
message_lines = stub_release['message'].split('\n')
resp = self.client.get(url)
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('release.html')
self.assertContains(resp, '<a href="%s">%s</a>' %
(author_url, author_name))
self.assertContains(resp, format_utc_iso_date(release_date))
self.assertContains(resp, '%s%s' % (message_lines[0],
'\n'.join(message_lines[1:])))
self.assertContains(resp, release_id)
self.assertContains(resp, release_name)
self.assertContains(resp, target_type)
self.assertContains(resp, '<a href="%s">%s</a>' %
(target_url, target))
swh_rel_id = get_swh_persistent_id('release', release_id)
swh_rel_id_url = reverse('browse-swh-id',
kwargs={'swh_id': swh_rel_id})
self.assertContains(resp, swh_rel_id)
self.assertContains(resp, swh_rel_id_url)
origin_info = {
'id': 13706355,
'type': 'git',
'url': 'https://github.com/python/cpython'
}
mock_service_utils.lookup_origin.return_value = origin_info
mock_service_common.lookup_origin_visits.return_value = stub_origin_visits
mock_service_common.MAX_LIMIT = 20
url = reverse('browse-release',
kwargs={'sha1_git': stub_release['id']},
- query_params={'origin_type': origin_info['type'],
- 'origin_url': origin_info['url']})
+ query_params={'origin': origin_info['url']})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('release.html')
self.assertContains(resp, author_url)
self.assertContains(resp, author_name)
self.assertContains(resp, format_utc_iso_date(release_date))
self.assertContains(resp, '%s%s' % (message_lines[0],
'\n'.join(message_lines[1:])))
self.assertContains(resp, release_id)
self.assertContains(resp, release_name)
self.assertContains(resp, target_type)
target_url = reverse('browse-revision', kwargs={'sha1_git': target},
- query_params={'origin_type': origin_info['type'],
- 'origin_url': origin_info['url']})
+ query_params={'origin': origin_info['url']})
self.assertContains(resp, '<a href="%s">%s</a>' % (target_url, target))
mock_service.lookup_release.side_effect = \
NotFoundExc('Release not found')
url = reverse('browse-release',
kwargs={'sha1_git': 'ffff'})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, 'Release not found', status_code=404)
diff --git a/swh/web/tests/browse/views/test_revision.py b/swh/web/tests/browse/views/test_revision.py
index 625150f0..6cb5aa3b 100644
--- a/swh/web/tests/browse/views/test_revision.py
+++ b/swh/web/tests/browse/views/test_revision.py
@@ -1,272 +1,272 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
# flake8: noqa
from unittest.mock import patch
from nose.tools import istest
from django.test import TestCase
from django.utils.html import escape
from swh.web.common.exc import NotFoundExc
from swh.web.common.utils import (
reverse, format_utc_iso_date, get_swh_persistent_id
)
from swh.web.tests.testbase import SWHWebTestBase
from .data.revision_test_data import (
revision_id_test, revision_metadata_test,
revision_history_log_test
)
from .data.origin_test_data import stub_origin_visits
class SwhBrowseRevisionTest(SWHWebTestBase, TestCase):
@patch('swh.web.browse.views.revision.service')
@patch('swh.web.browse.utils.service')
@patch('swh.web.common.utils.service')
@istest
def revision_browse(self, mock_service_common, mock_service_utils,
mock_service):
mock_service.lookup_revision.return_value = revision_metadata_test
url = reverse('browse-revision',
kwargs={'sha1_git': revision_id_test})
author_id = revision_metadata_test['author']['id']
author_name = revision_metadata_test['author']['name']
committer_id = revision_metadata_test['committer']['id']
committer_name = revision_metadata_test['committer']['name']
dir_id = revision_metadata_test['directory']
author_url = reverse('browse-person',
kwargs={'person_id': author_id})
committer_url = reverse('browse-person',
kwargs={'person_id': committer_id})
directory_url = reverse('browse-directory',
kwargs={'sha1_git': dir_id})
history_url = reverse('browse-revision-log',
kwargs={'sha1_git': revision_id_test})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('revision.html')
self.assertContains(resp, '<a href="%s">%s</a>' %
(author_url, author_name))
self.assertContains(resp, '<a href="%s">%s</a>' %
(committer_url, committer_name))
self.assertContains(resp, directory_url)
self.assertContains(resp, history_url)
for parent in revision_metadata_test['parents']:
parent_url = reverse('browse-revision',
kwargs={'sha1_git': parent})
self.assertContains(resp, '<a href="%s">%s</a>' %
(parent_url, parent))
author_date = revision_metadata_test['date']
committer_date = revision_metadata_test['committer_date']
message_lines = revision_metadata_test['message'].split('\n')
self.assertContains(resp, format_utc_iso_date(author_date))
self.assertContains(resp, format_utc_iso_date(committer_date))
self.assertContains(resp, message_lines[0])
self.assertContains(resp, '\n'.join(message_lines[1:]))
origin_info = {
'id': '7416001',
'type': 'git',
'url': 'https://github.com/webpack/webpack'
}
mock_service_utils.lookup_origin.return_value = origin_info
mock_service_common.lookup_origin_visits.return_value = stub_origin_visits
mock_service_common.MAX_LIMIT = 20
origin_directory_url = reverse('browse-origin-directory',
kwargs={'origin_type': origin_info['type'],
'origin_url': origin_info['url']},
query_params={'revision': revision_id_test})
origin_revision_log_url = reverse('browse-origin-log',
kwargs={'origin_type': origin_info['type'],
'origin_url': origin_info['url']},
query_params={'revision': revision_id_test})
url = reverse('browse-revision',
kwargs={'sha1_git': revision_id_test},
query_params={'origin_type': origin_info['type'],
- 'origin_url': origin_info['url']})
+ 'origin': origin_info['url']})
resp = self.client.get(url)
self.assertContains(resp, origin_directory_url)
self.assertContains(resp, origin_revision_log_url)
for parent in revision_metadata_test['parents']:
parent_url = reverse('browse-revision',
kwargs={'sha1_git': parent},
query_params={'origin_type': origin_info['type'],
- 'origin_url': origin_info['url']})
+ 'origin': origin_info['url']})
self.assertContains(resp, '<a href="%s">%s</a>' %
(parent_url, parent))
self.assertContains(resp, 'vault-cook-directory')
self.assertContains(resp, 'vault-cook-revision')
swh_rev_id = get_swh_persistent_id('revision', revision_id_test)
swh_rev_id_url = reverse('browse-swh-id',
kwargs={'swh_id': swh_rev_id})
self.assertContains(resp, swh_rev_id)
self.assertContains(resp, swh_rev_id_url)
@patch('swh.web.browse.views.revision.service')
@istest
def revision_log_browse(self, mock_service):
per_page = 10
mock_service.lookup_revision_log.return_value = \
revision_history_log_test[:per_page+1]
url = reverse('browse-revision-log',
kwargs={'sha1_git': revision_id_test},
query_params={'per_page': per_page})
resp = self.client.get(url)
prev_rev = revision_history_log_test[per_page]['id']
next_page_url = reverse('browse-revision-log',
kwargs={'sha1_git': prev_rev},
query_params={'revs_breadcrumb': revision_id_test,
'per_page': per_page})
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('revision-log.html')
self.assertContains(resp, ' ',
count=per_page)
self.assertContains(resp, 'Newer')
self.assertContains(resp, '<a href="%s">Older</a>' %
escape(next_page_url))
for log in revision_history_log_test[:per_page]:
author_url = reverse('browse-person',
kwargs={'person_id': log['author']['id']})
revision_url = reverse('browse-revision',
kwargs={'sha1_git': log['id']})
directory_url = reverse('browse-directory',
kwargs={'sha1_git': log['directory']})
self.assertContains(resp, '<a href="%s">%s</a>' %
(author_url, log['author']['name']))
self.assertContains(resp, '<a href="%s">%s</a>' %
(revision_url, log['id'][:7]))
self.assertContains(resp, directory_url)
mock_service.lookup_revision_log.return_value = \
revision_history_log_test[per_page:2*per_page+1]
resp = self.client.get(next_page_url)
prev_prev_rev = revision_history_log_test[2*per_page]['id']
prev_page_url = reverse('browse-revision-log',
kwargs={'sha1_git': revision_id_test},
query_params={'per_page': per_page})
next_page_url = reverse('browse-revision-log',
kwargs={'sha1_git': prev_prev_rev},
query_params={'revs_breadcrumb': revision_id_test + '/' + prev_rev,
'per_page': per_page})
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('revision-log.html')
self.assertContains(resp, ' ',
count=per_page)
self.assertContains(resp, '<a href="%s">Newer</a>' %
escape(prev_page_url))
self.assertContains(resp, '<a href="%s">Older</a>' %
escape(next_page_url))
mock_service.lookup_revision_log.return_value = \
revision_history_log_test[2*per_page:3*per_page+1]
resp = self.client.get(next_page_url)
prev_prev_prev_rev = revision_history_log_test[3*per_page]['id']
prev_page_url = reverse('browse-revision-log',
kwargs={'sha1_git': prev_rev},
query_params={'revs_breadcrumb': revision_id_test,
'per_page': per_page})
next_page_url = reverse('browse-revision-log',
kwargs={'sha1_git': prev_prev_prev_rev},
query_params={'revs_breadcrumb': revision_id_test + '/' + prev_rev + '/' + prev_prev_rev,
'per_page': per_page})
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('revision-log.html')
self.assertContains(resp, ' ',
count=per_page)
self.assertContains(resp, '<a href="%s">Newer</a>' %
escape(prev_page_url))
self.assertContains(resp, '<a href="%s">Older</a>' %
escape(next_page_url))
mock_service.lookup_revision_log.return_value = \
revision_history_log_test[3*per_page:3*per_page+per_page//2]
resp = self.client.get(next_page_url)
prev_page_url = reverse('browse-revision-log',
kwargs={'sha1_git': prev_prev_rev},
query_params={'revs_breadcrumb': revision_id_test + '/' + prev_rev,
'per_page': per_page})
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('revision-log.html')
self.assertContains(resp, ' ',
count=per_page//2)
self.assertContains(resp, 'Older')
self.assertContains(resp, '<a href="%s">Newer</a>' %
escape(prev_page_url))
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.revision.service')
@istest
def revision_request_errors(self, mock_service, mock_utils_service):
mock_service.lookup_revision.side_effect = \
NotFoundExc('Revision not found')
url = reverse('browse-revision',
kwargs={'sha1_git': revision_id_test})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, 'Revision not found', status_code=404)
mock_service.lookup_revision_log.side_effect = \
NotFoundExc('Revision not found')
url = reverse('browse-revision-log',
kwargs={'sha1_git': revision_id_test})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, 'Revision not found', status_code=404)
url = reverse('browse-revision',
kwargs={'sha1_git': revision_id_test},
query_params={'origin_type': 'git',
- 'origin_url': 'https://github.com/foo/bar'})
+ 'origin': 'https://github.com/foo/bar'})
mock_service.lookup_revision.side_effect = None
mock_utils_service.lookup_origin.side_effect = \
NotFoundExc('Origin not found')
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, 'Origin not found', status_code=404)